diff --git a/myenv/bin/Activate.ps1 b/myenv/bin/Activate.ps1
new file mode 100644
index 0000000..b49d77b
--- /dev/null
+++ b/myenv/bin/Activate.ps1
@@ -0,0 +1,247 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+    [Parameter(Mandatory = $false)]
+    [String]
+    $VenvDir,
+    [Parameter(Mandatory = $false)]
+    [String]
+    $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+    # Revert to original values
+
+    # The prior prompt:
+    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+    }
+
+    # The prior PYTHONHOME:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+    }
+
+    # The prior PATH:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+    }
+
+    # Just remove the VIRTUAL_ENV altogether:
+    if (Test-Path -Path Env:VIRTUAL_ENV) {
+        Remove-Item -Path env:VIRTUAL_ENV
+    }
+
+    # Just remove VIRTUAL_ENV_PROMPT altogether.
+    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
+        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
+    }
+
+    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+    }
+
+    # Leave deactivate function in the global namespace if requested:
+    if (-not $NonDestructive) {
+        Remove-Item -Path function:deactivate
+    }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+    [String]
+    $ConfigDir
+) {
+    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+    # An empty map will be returned if no config file is found.
+    $pyvenvConfig = @{ }
+
+    if ($pyvenvConfigPath) {
+
+        Write-Verbose "File exists, parse `key = value` lines"
+        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+        $pyvenvConfigContent | ForEach-Object {
+            $keyval = $PSItem -split "\s*=\s*", 2
+            if ($keyval[0] -and $keyval[1]) {
+                $val = $keyval[1]
+
+                # Remove extraneous quotations around a string value.
+                if ("'""".Contains($val.Substring(0, 1))) {
+                    $val = $val.Substring(1, $val.Length - 2)
+                }
+
+                $pyvenvConfig[$keyval[0]] = $val
+                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+            }
+        }
+    }
+    return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+    Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+    Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+        $Prompt = $pyvenvCfg['prompt'];
+    }
+    else {
+        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+        $Prompt = Split-Path -Path $venvDir -Leaf
+    }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+    Write-Verbose "Setting prompt to '$Prompt'"
+
+    # Set the prompt to include the env name
+    # Make sure _OLD_VIRTUAL_PROMPT is global
+    function global:_OLD_VIRTUAL_PROMPT { "" }
+    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+    function global:prompt {
+        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+        _OLD_VIRTUAL_PROMPT
+    }
+    $env:VIRTUAL_ENV_PROMPT = $Prompt
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+    Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
diff --git a/myenv/bin/activate b/myenv/bin/activate
new file mode 100644
index 0000000..7181507
--- /dev/null
+++ b/myenv/bin/activate
@@ -0,0 +1,70 @@
+# This file must be used with "source bin/activate" *from bash*
+# You cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # Call hash to forget past commands. Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    hash -r 2> /dev/null
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    unset VIRTUAL_ENV_PROMPT
+    if [ ! "${1:-}" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+# on Windows, a path can contain colons and backslashes and has to be converted:
+if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
+    # transform D:\path\to\venv to /d/path/to/venv on MSYS
+    # and to /cygdrive/d/path/to/venv on Cygwin
+    export VIRTUAL_ENV=$(cygpath "/home/norbert/solax/myenv")
+else
+    # use the path as-is
+    export VIRTUAL_ENV="/home/norbert/solax/myenv"
+fi
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+    unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+    _OLD_VIRTUAL_PS1="${PS1:-}"
+    PS1="(myenv) ${PS1:-}"
+    export PS1
+    VIRTUAL_ENV_PROMPT="(myenv) "
+    export VIRTUAL_ENV_PROMPT
+fi
+
+# Call hash to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+hash -r 2> /dev/null
diff --git a/myenv/bin/activate.csh b/myenv/bin/activate.csh
new file mode 100644
index 0000000..3b04529
--- /dev/null
+++ b/myenv/bin/activate.csh
@@ -0,0 +1,27 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/norbert/solax/myenv"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    set prompt = "(myenv) $prompt"
+    setenv VIRTUAL_ENV_PROMPT "(myenv) "
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/myenv/bin/activate.fish b/myenv/bin/activate.fish
new file mode 100644
index 0000000..4696e01
--- /dev/null
+++ b/myenv/bin/activate.fish
@@ -0,0 +1,69 @@
+# This file must be used with "source <venv>/bin/activate.fish" *from fish*
+# (https://fishshell.com/). You cannot run it directly.
+
+function deactivate -d "Exit virtual environment and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        # prevents error when using nested fish instances (Issue #93858)
+        if functions -q _old_fish_prompt
+            functions -e fish_prompt
+            functions -c _old_fish_prompt fish_prompt
+            functions -e _old_fish_prompt
+        end
+    end
+
+    set -e VIRTUAL_ENV
+    set -e VIRTUAL_ENV_PROMPT
+    if test "$argv[1]" != "nondestructive"
+        # Self-destruct!
+        functions -e deactivate
+    end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/norbert/solax/myenv"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# Unset PYTHONHOME if set.
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # Save the current fish_prompt function as the function _old_fish_prompt.
+    functions -c fish_prompt _old_fish_prompt
+
+    # With the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Save the return status of the last command.
+        set -l old_status $status
+
+        # Output the venv prompt; color taken from the blue of the Python logo.
+        printf "%s%s%s" (set_color 4B8BBE) "(myenv) " (set_color normal)
+
+        # Restore the return status of the previous command.
+        echo "exit $old_status" | .
+        # Output the original/"old" prompt.
+        _old_fish_prompt
+    end
+
+    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+    set -gx VIRTUAL_ENV_PROMPT "(myenv) "
+end
diff --git a/myenv/bin/httpx b/myenv/bin/httpx
new file mode 100755
index 0000000..4fda488
--- /dev/null
+++ b/myenv/bin/httpx
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from httpx import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pip b/myenv/bin/pip
new file mode 100755
index 0000000..8f3cb50
--- /dev/null
+++ b/myenv/bin/pip
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pip3 b/myenv/bin/pip3
new file mode 100755
index 0000000..8f3cb50
--- /dev/null
+++ b/myenv/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pip3.12 b/myenv/bin/pip3.12
new file mode 100755
index 0000000..8f3cb50
--- /dev/null
+++ b/myenv/bin/pip3.12
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pymodbus.simulator b/myenv/bin/pymodbus.simulator
new file mode 100755
index 0000000..571ad92
--- /dev/null
+++ b/myenv/bin/pymodbus.simulator
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pymodbus.server.simulator.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pyserial-miniterm b/myenv/bin/pyserial-miniterm
new file mode 100755
index 0000000..6e2febf
--- /dev/null
+++ b/myenv/bin/pyserial-miniterm
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from serial.tools.miniterm import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/pyserial-ports b/myenv/bin/pyserial-ports
new file mode 100755
index 0000000..c43cef5
--- /dev/null
+++ b/myenv/bin/pyserial-ports
@@ -0,0 +1,8 @@
+#!/home/norbert/solax/myenv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from serial.tools.list_ports import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/myenv/bin/python b/myenv/bin/python
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/myenv/bin/python
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/myenv/bin/python3 b/myenv/bin/python3
new file mode 120000
index 0000000..ae65fda
--- /dev/null
+++ b/myenv/bin/python3
@@ -0,0 +1 @@
+/usr/bin/python3
\ No newline at end of file
diff --git a/myenv/bin/python3.12 b/myenv/bin/python3.12
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/myenv/bin/python3.12
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/LICENSE b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/LICENSE
new file mode 100644
index 0000000..4244199
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/LICENSE
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2017, Hsiaoming Yang
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/METADATA b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/METADATA
new file mode 100644
index 0000000..3aca531
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/METADATA
@@ -0,0 +1,106 @@
+Metadata-Version: 2.1
+Name: Authlib
+Version: 1.3.2
+Summary: The ultimate Python library in building OAuth and OpenID Connect servers and clients.
+Author-email: Hsiaoming Yang
+License: BSD-3-Clause
+Project-URL: Documentation, https://docs.authlib.org/
+Project-URL: Purchase, https://authlib.org/plans
+Project-URL: Issues, https://github.com/lepture/authlib/issues
+Project-URL: Source, https://github.com/lepture/authlib
+Project-URL: Donate, https://github.com/sponsors/lepture
+Project-URL: Blog, https://blog.authlib.org/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security
+Classifier: Topic :: Security :: Cryptography
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: cryptography
+
+Authlib
+=======
+
+The ultimate Python library in building OAuth and OpenID Connect servers.
+JWS, JWK, JWA, JWT are included.
+
+Useful Links
+------------
+
+1. Homepage: https://authlib.org/
+2. Documentation: https://docs.authlib.org/
+3. Purchase Commercial License: https://authlib.org/plans
+4. Blog: https://blog.authlib.org/
+5. More Repositories: https://github.com/authlib
+6. Twitter: https://twitter.com/authlib
+7. Donate: https://www.patreon.com/lepture
+
+Specifications
+--------------
+
+- RFC5849: The OAuth 1.0 Protocol
+- RFC6749: The OAuth 2.0 Authorization Framework
+- RFC6750: The OAuth 2.0 Authorization Framework: Bearer Token Usage
+- RFC7009: OAuth 2.0 Token Revocation
+- RFC7515: JSON Web Signature
+- RFC7516: JSON Web Encryption
+- RFC7517: JSON Web Key
+- RFC7518: JSON Web Algorithms
+- RFC7519: JSON Web Token
+- RFC7521: Assertion Framework for OAuth 2.0 Client Authentication and Authorization Grants
+- RFC7523: JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants
+- RFC7591: OAuth 2.0 Dynamic Client Registration Protocol
+- RFC7636: Proof Key for Code Exchange by OAuth Public Clients
+- RFC7638: JSON Web Key (JWK) Thumbprint
+- RFC7662: OAuth 2.0 Token Introspection
+- RFC8037: CFRG Elliptic Curve Diffie-Hellman (ECDH) and Signatures in JSON Object Signing and Encryption (JOSE)
+- RFC8414: OAuth 2.0 Authorization Server Metadata
+- RFC8628: OAuth 2.0 Device Authorization Grant
+- OpenID Connect 1.0
+- OpenID Connect Discovery 1.0
+- draft-madden-jose-ecdh-1pu-04: Public Key Authenticated Encryption for JOSE: ECDH-1PU
+
+Implementations
+---------------
+
+- Requests OAuth 1 Session
+- Requests OAuth 2 Session
+- Requests Assertion Session
+- HTTPX OAuth 1 Session
+- HTTPX OAuth 2 Session
+- HTTPX Assertion Session
+- Flask OAuth 1/2 Client
+- Django OAuth 1/2 Client
+- Starlette OAuth 1/2 Client
+- Flask OAuth 1.0 Server
+- Flask OAuth 2.0 Server
+- Flask OpenID Connect 1.0
+- Django OAuth 1.0 Server
+- Django OAuth 2.0 Server
+- Django OpenID Connect 1.0
+
+License
+-------
+
+Authlib is licensed under BSD. Please see LICENSE for licensing details.
+
+If this license does not fit your company, consider to purchase a commercial
+license. Find more information on `Authlib Plans`_.
+
+..
_`Authlib Plans`: https://authlib.org/plans diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/RECORD b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/RECORD new file mode 100644 index 0000000..bc8bde7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/RECORD @@ -0,0 +1,386 @@ +Authlib-1.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Authlib-1.3.2.dist-info/LICENSE,sha256=jhtIUY3pxs0Ay0jH_luAI_2Q1VUsoS6-c2Kg3zDdvkU,1514 +Authlib-1.3.2.dist-info/METADATA,sha256=I4_btH4F4UtlH1ubX5gQL270qcWBOsjAf_rHRJIukyg,3876 +Authlib-1.3.2.dist-info/RECORD,, +Authlib-1.3.2.dist-info/WHEEL,sha256=GUeE9LxUgRABPG7YM0jCNs9cBsAIx0YAkzCB88PMLgc,109 +Authlib-1.3.2.dist-info/top_level.txt,sha256=Rj3mJn0jhRuCs6x7ysI6hYE2PePbuxey6y6jswadAEY,8 +authlib/__init__.py,sha256=CoObQJQX-YGSJy-HWbJPtK6XbpfKDBc21DJwjhLnIcM,476 +authlib/__pycache__/__init__.cpython-312.pyc,, +authlib/__pycache__/consts.cpython-312.pyc,, +authlib/__pycache__/deprecate.cpython-312.pyc,, +authlib/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/common/__pycache__/__init__.cpython-312.pyc,, +authlib/common/__pycache__/encoding.cpython-312.pyc,, +authlib/common/__pycache__/errors.cpython-312.pyc,, +authlib/common/__pycache__/security.cpython-312.pyc,, +authlib/common/__pycache__/urls.cpython-312.pyc,, +authlib/common/encoding.py,sha256=sdiaZwuXZI-ruNPGAhJ0oIuZTcrzdawneS_PoJAnHYk,1546 +authlib/common/errors.py,sha256=z8kGl0qRBnimrMYqVgi1aqLsqSng8YaMtcqCy6MHff8,1684 +authlib/common/security.py,sha256=2xcxtJWVE26kosNJTWtnN3skeSzm3Jjtpm4wxoTCBYs,493 +authlib/common/urls.py,sha256=gUpc_VB9emhmCE0EunlxDiQHHZhegGYdLVPT-qoEkco,4501 +authlib/consts.py,sha256=u151FZN3ns8C2bOsuxkB1vVTqb4uiByLVX0zu19Vkkg,300 +authlib/deprecate.py,sha256=dIjr5VmDMK3bua0cOzJh0Q2RAlAtMhW6iM6ENIynIQ8,481 +authlib/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/integrations/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/base_client/__init__.py,sha256=xCaZt-rH5n4g0tBVgrM4KKMzWUQ6NHUcLIHwlIuKqMM,653 +authlib/integrations/base_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/async_app.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/async_openid.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/errors.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/framework_integration.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/registry.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/sync_app.cpython-312.pyc,, +authlib/integrations/base_client/__pycache__/sync_openid.cpython-312.pyc,, +authlib/integrations/base_client/async_app.py,sha256=3MbucTGkyEBz8W7SIvJSwYxdGR_8wquxNKee8BHt4i8,5847 +authlib/integrations/base_client/async_openid.py,sha256=-OZl3g_8EYJNlLxCukcPAvOPd-o-OWP9b-updMs6Z-c,2803 +authlib/integrations/base_client/errors.py,sha256=fwXW7ldF-TeCIHeANGWYqv5hhaFpXzLLsGRUBwgcy4c,632 +authlib/integrations/base_client/framework_integration.py,sha256=12rBh8a-cj2r0mJkFKhREH49gv2D2v5aF1UZSUAD430,1871 +authlib/integrations/base_client/registry.py,sha256=FAjZBN0n_e-3MbC2ZYCUSUxeHCrKSdoAE21taWxSAeA,4273 +authlib/integrations/base_client/sync_app.py,sha256=IEatF5mP2WwmYGHa5yiBJDh0uWn7HAfnz7Ms3HWSQ1g,12546 +authlib/integrations/base_client/sync_openid.py,sha256=V7RpQKAV6GXG1uS1lmTafzoqBz6CRszXcHGM7OnBcbM,2828 +authlib/integrations/django_client/__init__.py,sha256=ff_Kol2-pT-7E0zav2A6tkgqMT1YYLpmVuNaKh-Fz5g,458 
+authlib/integrations/django_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/django_client/__pycache__/apps.cpython-312.pyc,, +authlib/integrations/django_client/__pycache__/integration.cpython-312.pyc,, +authlib/integrations/django_client/apps.py,sha256=AR7-2aa1xMJj6UX_dl8PreAVw_H_rxABPV9paqioOuw,3334 +authlib/integrations/django_client/integration.py,sha256=TfgtYs2X_IwismSdX8TI73EcpoYCfjS9OovOdEP8XLA,650 +authlib/integrations/django_oauth1/__init__.py,sha256=yp66WLC43YdsICGgVDbu6AfIRyPR17M43umIUcxjH10,221 +authlib/integrations/django_oauth1/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/django_oauth1/__pycache__/authorization_server.cpython-312.pyc,, +authlib/integrations/django_oauth1/__pycache__/nonce.cpython-312.pyc,, +authlib/integrations/django_oauth1/__pycache__/resource_protector.cpython-312.pyc,, +authlib/integrations/django_oauth1/authorization_server.py,sha256=UKkJuE4nwTPNrC8emL5ENqxwP1t3-Qu7kZn6c59zdng,4536 +authlib/integrations/django_oauth1/nonce.py,sha256=m6j4FWsSeQ1S-LJEgF4BF0TPGRZQuseOumkrbuF6KhY,396 +authlib/integrations/django_oauth1/resource_protector.py,sha256=TL1kRvuuF91kZBBe4FZzo7nY0iOKpGWIdjKvdlOcXAY,2343 +authlib/integrations/django_oauth2/__init__.py,sha256=HGqxRud5D9EGZIthXmySYhB7d-90qzZlXZDrd4mPHnQ,278 +authlib/integrations/django_oauth2/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/django_oauth2/__pycache__/authorization_server.cpython-312.pyc,, +authlib/integrations/django_oauth2/__pycache__/endpoints.cpython-312.pyc,, +authlib/integrations/django_oauth2/__pycache__/requests.cpython-312.pyc,, +authlib/integrations/django_oauth2/__pycache__/resource_protector.cpython-312.pyc,, +authlib/integrations/django_oauth2/__pycache__/signals.cpython-312.pyc,, +authlib/integrations/django_oauth2/authorization_server.py,sha256=rtlQlDyeNME3Hdv4Tnu5tYBZ2zJ9GUn-kHEvNCF5Jh8,4388 +authlib/integrations/django_oauth2/endpoints.py,sha256=lKkDmQklHNTCXK_L-6-_PrHa6XvnFBT7a2BAciYkv7o,1853 +authlib/integrations/django_oauth2/requests.py,sha256=AzZNUT9JVmF0AdopC1MjyncoYNECJcQ-CUah_5Dk9Cw,1321 +authlib/integrations/django_oauth2/resource_protector.py,sha256=O3Snq0LO4divy6sZwDjh4UoMoDVn3qhLvGILaqNUWnM,2595 +authlib/integrations/django_oauth2/signals.py,sha256=8SlnOsi1IuBPmrCi7dOLXK70N_m9y6B3msjMCtBMnSk,236 +authlib/integrations/flask_client/__init__.py,sha256=DCSIvVck7aBh-zDiGILsYJp_pJSl1qHzR3p-pDGfhNk,1677 +authlib/integrations/flask_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/flask_client/__pycache__/apps.cpython-312.pyc,, +authlib/integrations/flask_client/__pycache__/integration.cpython-312.pyc,, +authlib/integrations/flask_client/apps.py,sha256=yfHYORyAcpJFZn7WCY55taEDpoFLAUZ4WCDwTc7-eDk,3615 +authlib/integrations/flask_client/integration.py,sha256=T_O0-YZbezOvvyJUJZKhW_lyHTbSkEZvdQkAGwHLyjY,805 +authlib/integrations/flask_oauth1/__init__.py,sha256=PGDVdNJ9oGs5bYJr7oGdpQX0tE2nQJNcHJ8t-SxEAEY,260 +authlib/integrations/flask_oauth1/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/flask_oauth1/__pycache__/authorization_server.cpython-312.pyc,, +authlib/integrations/flask_oauth1/__pycache__/cache.cpython-312.pyc,, +authlib/integrations/flask_oauth1/__pycache__/resource_protector.cpython-312.pyc,, +authlib/integrations/flask_oauth1/authorization_server.py,sha256=qa0C73Qu7lEOM24-dYQ6wOqmXoHiSdlwxcmtqTMYNug,6299 +authlib/integrations/flask_oauth1/cache.py,sha256=x1bOuGhHKrCUO0X2XrR80f898ca8sM2edQfT6nuDFwY,2996 
+authlib/integrations/flask_oauth1/resource_protector.py,sha256=feMGEpwRI5DfgJjJhyumI3XdoaAEjhZHzId_-sFDkBE,3805 +authlib/integrations/flask_oauth2/__init__.py,sha256=8xa6R7Otk9DSHzKFcyGeDv-H7OLUbFwtF73ybpe9jNY,243 +authlib/integrations/flask_oauth2/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/flask_oauth2/__pycache__/authorization_server.cpython-312.pyc,, +authlib/integrations/flask_oauth2/__pycache__/errors.cpython-312.pyc,, +authlib/integrations/flask_oauth2/__pycache__/requests.cpython-312.pyc,, +authlib/integrations/flask_oauth2/__pycache__/resource_protector.cpython-312.pyc,, +authlib/integrations/flask_oauth2/__pycache__/signals.cpython-312.pyc,, +authlib/integrations/flask_oauth2/authorization_server.py,sha256=Lz4b_77aGIJ7Rmh3LxWQzWZeyWhtKpFpjBzy18JRibI,5859 +authlib/integrations/flask_oauth2/errors.py,sha256=2pVX8e0Tts4j5I3g8RMuDkxAwRfaDfVkuMiAsCBffso,1099 +authlib/integrations/flask_oauth2/requests.py,sha256=EdfZJ2VSslkmZxgZir6mvyu83rLjo8SPquCIbanzcfw,1022 +authlib/integrations/flask_oauth2/resource_protector.py,sha256=Kpgw5UAy1dPXv3T2f5o2KbPXO6naRmsc25KL3VuMkC4,3843 +authlib/integrations/flask_oauth2/signals.py,sha256=CKao8F778CkUzl7mKjF96smr7WKJ0nfxBT0onVFq10Y,341 +authlib/integrations/httpx_client/__init__.py,sha256=zuO_FIAdLEJ9Ch25kTa-Nsyi2gqkPuAsXdHAeYc0nBI,804 +authlib/integrations/httpx_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/httpx_client/__pycache__/assertion_client.cpython-312.pyc,, +authlib/integrations/httpx_client/__pycache__/oauth1_client.cpython-312.pyc,, +authlib/integrations/httpx_client/__pycache__/oauth2_client.cpython-312.pyc,, +authlib/integrations/httpx_client/__pycache__/utils.cpython-312.pyc,, +authlib/integrations/httpx_client/assertion_client.py,sha256=1T2ZEM3cboubUu2Zme-6KKTY1Y06BCIQMhHSQ_LC6ZY,3179 +authlib/integrations/httpx_client/oauth1_client.py,sha256=rkb-wwh3Oqryj3QZop9bo3q0td2OjFptf6rDX5VUUBY,4084 +authlib/integrations/httpx_client/oauth2_client.py,sha256=H7cB8BSdGHmxkXxShNI_KizFiYfKqPa9M65_3aqiXyg,8483 +authlib/integrations/httpx_client/utils.py,sha256=XF8d4xz4d7tDoaaq3LQ3GZonED1_EtW8zH_FoTcUPL4,888 +authlib/integrations/requests_client/__init__.py,sha256=Nco-Q1_wOswQ9qCgBgh0rVWPYO9ddEKnlnnJ9rGZE_U,652 +authlib/integrations/requests_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/requests_client/__pycache__/assertion_session.cpython-312.pyc,, +authlib/integrations/requests_client/__pycache__/oauth1_session.cpython-312.pyc,, +authlib/integrations/requests_client/__pycache__/oauth2_session.cpython-312.pyc,, +authlib/integrations/requests_client/__pycache__/utils.cpython-312.pyc,, +authlib/integrations/requests_client/assertion_session.py,sha256=OukMQJ1kj1uk5firduI7nc0reg9knNwnlL6xX01Bo_M,1895 +authlib/integrations/requests_client/oauth1_session.py,sha256=aXC3vFLy4ytV8G3yL-on6dJitNfdKmkttcItTHTejfU,2209 +authlib/integrations/requests_client/oauth2_session.py,sha256=TXEDzUVvH42_BxUmbUsR5syImbMfaYbgB4w446Ajsck,4700 +authlib/integrations/requests_client/utils.py,sha256=4ohGF-9JUR9Ayw63glEednsZSbscrSD4Rx0BPkPrSeA,274 +authlib/integrations/sqla_oauth2/__init__.py,sha256=d8g3ipiiPtyk1JnmTDzeAKCdm-ozjNoERDWLEkpBgL8,548 +authlib/integrations/sqla_oauth2/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/sqla_oauth2/__pycache__/client_mixin.cpython-312.pyc,, +authlib/integrations/sqla_oauth2/__pycache__/functions.cpython-312.pyc,, +authlib/integrations/sqla_oauth2/__pycache__/tokens_mixins.cpython-312.pyc,, 
+authlib/integrations/sqla_oauth2/client_mixin.py,sha256=qxoJLpnH3ncdvQPYnk8HoqllOoiUZAl_md3RteELQpQ,4116 +authlib/integrations/sqla_oauth2/functions.py,sha256=0smHFpiQ6srSLR-ynLCmbFIq7iPvbBjoQUDibtD_4e8,3195 +authlib/integrations/sqla_oauth2/tokens_mixins.py,sha256=9cBNIUxKLsaP8DTkOKlkZQ1m5guxeOnpJ9t-K8Kuy0c,2008 +authlib/integrations/starlette_client/__init__.py,sha256=3bOTtevT4LmMurKF6dz0HusPbiVrPjBLVRuf6P0wHXk,666 +authlib/integrations/starlette_client/__pycache__/__init__.cpython-312.pyc,, +authlib/integrations/starlette_client/__pycache__/apps.cpython-312.pyc,, +authlib/integrations/starlette_client/__pycache__/integration.cpython-312.pyc,, +authlib/integrations/starlette_client/apps.py,sha256=BHO3DRhLv8x14DAF6CCJKaTVI6Lr9WKh4vIm4eTJPjY,3556 +authlib/integrations/starlette_client/integration.py,sha256=Hvkp3wn5jFWnBxbOyBQrCVAxuuPgMeYJIhhJgi5r4sg,2207 +authlib/jose/__init__.py,sha256=qpIwbdODthy50cit_FeHECvwRGd7pIABiYEA4aiFV48,1399 +authlib/jose/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/__pycache__/errors.cpython-312.pyc,, +authlib/jose/__pycache__/jwk.cpython-312.pyc,, +authlib/jose/__pycache__/util.cpython-312.pyc,, +authlib/jose/drafts/__init__.py,sha256=A--_H-kGg_s_Yf4m_P3Whb7HRQbKynDGtQYZlnOtkFc,518 +authlib/jose/drafts/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/drafts/__pycache__/_jwe_algorithms.cpython-312.pyc,, +authlib/jose/drafts/__pycache__/_jwe_enc_cryptodome.cpython-312.pyc,, +authlib/jose/drafts/__pycache__/_jwe_enc_cryptography.cpython-312.pyc,, +authlib/jose/drafts/_jwe_algorithms.py,sha256=zbDaWazb4HdYtQmstf0MofDDaMexVwqkmnfIPfc6luY,6917 +authlib/jose/drafts/_jwe_enc_cryptodome.py,sha256=a4Vb0AUZWZWlCoTKqOfBaLZ-o1D1bgS0hKuBKJT7Kqo,1860 +authlib/jose/drafts/_jwe_enc_cryptography.py,sha256=N2Bm9zp7MMSe_kbPwjyUAyD3MMOTj5BmpEUc1k_IqUw,1743 +authlib/jose/errors.py,sha256=j-vg5TV7uiIS-xBgq-0pwxuerCDV31n5VKerCsddJR4,2977 +authlib/jose/jwk.py,sha256=VZiMATxt4UPyU6spf7wKE0rwql4udvl3jZqW85Endwo,490 +authlib/jose/rfc7515/__init__.py,sha256=0NhWGkry69LiJH6cAkwIUNmQUpuGh463TNwIpoQjtE4,360 +authlib/jose/rfc7515/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc7515/__pycache__/jws.cpython-312.pyc,, +authlib/jose/rfc7515/__pycache__/models.cpython-312.pyc,, +authlib/jose/rfc7515/jws.py,sha256=0JEEGLkI6vsERYdViRGRrZBQD9hJUS9j8OPoOxD0-2c,11270 +authlib/jose/rfc7515/models.py,sha256=B3HygZbTamiXc-rhnncMwCHL1LlblU69TCkyV2gezTo,2445 +authlib/jose/rfc7516/__init__.py,sha256=SCAxvSIWD0NF2_Gcq1BrIkKx3Bqr_6WYhFpDFt3AQ6A,465 +authlib/jose/rfc7516/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc7516/__pycache__/jwe.cpython-312.pyc,, +authlib/jose/rfc7516/__pycache__/models.cpython-312.pyc,, +authlib/jose/rfc7516/jwe.py,sha256=j26ji9QSRTuWbH3E3vz10YtDzIoteqZ5bY5VDA7Vp7U,29706 +authlib/jose/rfc7516/models.py,sha256=RdtLB8_KzxZ35DcgkXFCy25rkXEFp9tg22m5oqeBv-A,4341 +authlib/jose/rfc7517/__init__.py,sha256=LfowmYyTdC0t5wB6ptJs45Qkj-6nRmHmhRONAh-UFxY,424 +authlib/jose/rfc7517/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc7517/__pycache__/_cryptography_key.cpython-312.pyc,, +authlib/jose/rfc7517/__pycache__/asymmetric_key.cpython-312.pyc,, +authlib/jose/rfc7517/__pycache__/base_key.cpython-312.pyc,, +authlib/jose/rfc7517/__pycache__/jwk.cpython-312.pyc,, +authlib/jose/rfc7517/__pycache__/key_set.cpython-312.pyc,, +authlib/jose/rfc7517/_cryptography_key.py,sha256=1-EQ1YD7ZR7Gp7FBntrWEN-76Su_vlunuzC77VPJ_sg,1257 +authlib/jose/rfc7517/asymmetric_key.py,sha256=cP5ka_7Ez5gUvVsb54WqVsuuSLDxXGwMsqGqXAvTQlw,6229 
+authlib/jose/rfc7517/base_key.py,sha256=udNnaEw7_pHCfUPNc2SyfgVwZDm32DWH7p3J3Ssgk80,3261 +authlib/jose/rfc7517/jwk.py,sha256=xUPIgYiAuyo7kDHFuVfaKeyY3MY9tJURfCIMZMo70o0,2042 +authlib/jose/rfc7517/key_set.py,sha256=CAStzaaCCq5IcPwc_6I3lqYWJdhw1E_JuMqouAF0IW8,883 +authlib/jose/rfc7518/__init__.py,sha256=69w8-62wS4DJNaCCHBUYiueiORtRl6Mlek36CNoTKOQ,879 +authlib/jose/rfc7518/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/ec_key.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/jwe_algs.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/jwe_encs.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/jwe_zips.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/jws_algs.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/oct_key.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/rsa_key.cpython-312.pyc,, +authlib/jose/rfc7518/__pycache__/util.cpython-312.pyc,, +authlib/jose/rfc7518/ec_key.py,sha256=_cot3xwrpuER_tj8eiHkx-L2EJHnYSy4iWieD0RvYKo,3511 +authlib/jose/rfc7518/jwe_algs.py,sha256=yOgfh7WYo7MCHENw4EiucP9ep5nbCd1-zKmyUSbn6Ko,11311 +authlib/jose/rfc7518/jwe_encs.py,sha256=0Mjl9wD3VYpc8ApgjWF_T3n7m2S-qtNH-cnyI-P8pAw,5047 +authlib/jose/rfc7518/jwe_zips.py,sha256=9Qron3QW-1GHqoZHGa6EjLF1VAMMlB2gvHlr2anWqtY,561 +authlib/jose/rfc7518/jws_algs.py,sha256=dfWu1QwmMtlJDB4J1JFnmKxYbCzXNmicn8djnKKNqMM,6493 +authlib/jose/rfc7518/oct_key.py,sha256=jer7R85DqkUrWJamgMziZdgFyZE_ukJljkiD_2nLJ7Q,2678 +authlib/jose/rfc7518/rsa_key.py,sha256=hxCJAs-Ljl_RRv6D5ezAhQCpjwYhp2OU3zXtCqfO_6k,4192 +authlib/jose/rfc7518/util.py,sha256=LpOgX10QHuqss6x015QPgApTlnYJ_cQyd0hzeYALnaE,265 +authlib/jose/rfc7519/__init__.py,sha256=vJKdsUGkdKlGBKctdh5CLM2jc20903rIfAlsHjbr-hA,309 +authlib/jose/rfc7519/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc7519/__pycache__/claims.cpython-312.pyc,, +authlib/jose/rfc7519/__pycache__/jwt.cpython-312.pyc,, +authlib/jose/rfc7519/claims.py,sha256=CCm4EJiv_BKxaLFhc-34sH5cIQ6Y960I1yRaB3d9_vM,8709 +authlib/jose/rfc7519/jwt.py,sha256=myFXsrFfQLV6xxsOAVv--tQF4FuwhLtvCajZtRcIKSs,5950 +authlib/jose/rfc8037/__init__.py,sha256=MV25hs0RY6HU0pE5UKB2hmRI0Avv-V8BaPtNxIshh0A,119 +authlib/jose/rfc8037/__pycache__/__init__.cpython-312.pyc,, +authlib/jose/rfc8037/__pycache__/jws_eddsa.cpython-312.pyc,, +authlib/jose/rfc8037/__pycache__/okp_key.cpython-312.pyc,, +authlib/jose/rfc8037/jws_eddsa.py,sha256=dpg6ZKOdpqGcp582mP5yqsRLlnC81y_Mnl3xL0_tgLQ,716 +authlib/jose/rfc8037/okp_key.py,sha256=SlpI-u0J0l1K0J5rh1L9g7yiUqEn6YTpmoJc7tpRmhw,3560 +authlib/jose/util.py,sha256=eZGduiUbhgqYYxzjT-b1OaKZFTTICZE16Sq1UpyBK48,1065 +authlib/oauth1/__init__.py,sha256=8c5_O3G8lWOUqd3NgWR8CGpCnKEubMA4jeVNHocRQvQ,735 +authlib/oauth1/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth1/__pycache__/client.cpython-312.pyc,, +authlib/oauth1/__pycache__/errors.cpython-312.pyc,, +authlib/oauth1/client.py,sha256=Vyo3cfAzU1f1WavJ-WosZy3B_eV0RShW_19sx1kc5I8,6524 +authlib/oauth1/errors.py,sha256=pg0NaUgENjfTN_ba50_yQB9aSNe5Mte5MDlikFuypBY,46 +authlib/oauth1/rfc5849/__init__.py,sha256=S4rdtQiDd83QsR1hBqdsvuI-VgmgoG7rFuJH2fLP_K4,1036 +authlib/oauth1/rfc5849/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/authorization_server.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/base_server.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/client_auth.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/errors.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/models.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/parameters.cpython-312.pyc,, 
+authlib/oauth1/rfc5849/__pycache__/resource_protector.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/rsa.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/signature.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/util.cpython-312.pyc,, +authlib/oauth1/rfc5849/__pycache__/wrapper.cpython-312.pyc,, +authlib/oauth1/rfc5849/authorization_server.py,sha256=nHnxaNgOfTZ0FwnFXe0--A81-rgFTBoT72OSDmjzZl8,13869 +authlib/oauth1/rfc5849/base_server.py,sha256=-9Il95qaOhoFEQYJPkSMqQailAfQRYBtbdt1b0dZagc,3849 +authlib/oauth1/rfc5849/client_auth.py,sha256=SnCp8R50CAJxyHMtkuYN5ZvbHJcLfnldctVaZtz9hnY,6920 +authlib/oauth1/rfc5849/errors.py,sha256=z7LM1IMKzzTb3vnjRziRJk1tH0dQZm1axHnw8DXKXtA,2303 +authlib/oauth1/rfc5849/models.py,sha256=qdNruenkQVjrOjRkPku4EtU7ak3giIpxwirrRJd8Hi0,3418 +authlib/oauth1/rfc5849/parameters.py,sha256=bHgF_EwyJqWi1rvqYsbDKWf9I7z5RJh1OUpNvARHYEg,3455 +authlib/oauth1/rfc5849/resource_protector.py,sha256=v4EsTBnqYAQMarwukpYmgiyT3J9khnp8Apj6n_RRabI,1258 +authlib/oauth1/rfc5849/rsa.py,sha256=z2cx8e-p2pdvzx_WQm9mcMzkGfsBJniWoy6SLn6XZa4,896 +authlib/oauth1/rfc5849/signature.py,sha256=z1xPJmbA19zXJsfOdHSUOdIjAeMYUauSJm3Q1COvnpY,14123 +authlib/oauth1/rfc5849/util.py,sha256=89kP-xwQHop8SaBjCEkppxzYO7GQFbmi3ekVyI-zFvE,136 +authlib/oauth1/rfc5849/wrapper.py,sha256=FCyqlpSnCmNT5U0H_cM2yqkdTALoRLFnOu409xUBOQ0,3945 +authlib/oauth2/__init__.py,sha256=SlhZAaE8Tudl1W5KE57rfitY-9lLSvHJa02blVg9kJo,423 +authlib/oauth2/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/__pycache__/auth.cpython-312.pyc,, +authlib/oauth2/__pycache__/base.cpython-312.pyc,, +authlib/oauth2/__pycache__/client.cpython-312.pyc,, +authlib/oauth2/auth.py,sha256=dNJF5BSwO0Qo9u-VvCwdCBl9kfaJnKruh4MriGgC1G0,3439 +authlib/oauth2/base.py,sha256=cadG08-t_9mhgwzdo73bLeAa1Qvbw3qj4j-g1tbl6uo,958 +authlib/oauth2/client.py,sha256=l8rYciVHptgOMpE_mVhQfzceqvzyDMdIAySARhfDHuc,18024 +authlib/oauth2/rfc6749/__init__.py,sha256=2WVrM30q6NM-DbUjEX2wUwqJwrR0nBr-9HFvjP80b5A,2323 +authlib/oauth2/rfc6749/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/authenticate_client.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/authorization_server.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/errors.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/models.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/parameters.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/requests.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/resource_protector.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/token_endpoint.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/util.cpython-312.pyc,, +authlib/oauth2/rfc6749/__pycache__/wrappers.cpython-312.pyc,, +authlib/oauth2/rfc6749/authenticate_client.py,sha256=8ZmUuxuodBQeRe2Z4vMXi_OdPjTJbbnSnY7V8ODNNZ4,3748 +authlib/oauth2/rfc6749/authorization_server.py,sha256=HjrCxNfnAABaSEntZQxcZn3Vkg1Xi6N-nEUxMMdq-YA,11960 +authlib/oauth2/rfc6749/errors.py,sha256=J93QcmMsS_eBgsBa_P4wepkAw87C7ZDW8-JGvc3X6c8,7371 +authlib/oauth2/rfc6749/grants/__init__.py,sha256=jJZOtFgyBztuIGQxcuK1qtvnR3hWRDU-G59RuyWmqKI,1314 +authlib/oauth2/rfc6749/grants/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/__pycache__/authorization_code.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/__pycache__/base.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/__pycache__/client_credentials.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/__pycache__/implicit.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/__pycache__/refresh_token.cpython-312.pyc,, 
+authlib/oauth2/rfc6749/grants/__pycache__/resource_owner_password_credentials.cpython-312.pyc,, +authlib/oauth2/rfc6749/grants/authorization_code.py,sha256=ktdVV-2_Mp5tl7TTSj2Jxx4xpV53Mmouzb0APZmmaS8,15367 +authlib/oauth2/rfc6749/grants/base.py,sha256=OZyWwm2-hUsQXd0hmOfdk6B72TyLOC4GtUe5WoerXks,5794 +authlib/oauth2/rfc6749/grants/client_credentials.py,sha256=OeG_V5aR-oHxkpj5ejOo9pqfa7wu1rKRCzUZ6F9ukJk,3892 +authlib/oauth2/rfc6749/grants/implicit.py,sha256=qNBYKp8GG3sNLZSfC1VL6xRiloTXDl3c1qPXsGf3XF8,9297 +authlib/oauth2/rfc6749/grants/refresh_token.py,sha256=fBAcFH35PhdW15rkQwpK_tH88JdOCRH7XFX8vcfLRZY,6432 +authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py,sha256=bmOAvYmjEhOlV77xsKQy7JndZNspfaeYkQXUGVao0Ak,5755 +authlib/oauth2/rfc6749/models.py,sha256=lpqMhIwZbnEJazleH4cM3G3jQh14Zs5Sx_csuPy1Z64,7502 +authlib/oauth2/rfc6749/parameters.py,sha256=qjvtW7-APiQ91MVm4cu0LzHNTNFC_3HLZ44uxhlZzog,8308 +authlib/oauth2/rfc6749/requests.py,sha256=bo7qGfhGf2llkRAJGQ_YM0gXMxnQifsAMbA74DiG5oE,2841 +authlib/oauth2/rfc6749/resource_protector.py,sha256=G62crN3HobWrU9DnSNm3iEKXls2cutvpkSEdXfqP_Pg,5309 +authlib/oauth2/rfc6749/token_endpoint.py,sha256=IUx79XxDS12s3QHQukzCwqXS3s1K9l3OrkvlbApnLS0,1103 +authlib/oauth2/rfc6749/util.py,sha256=VG4BVdNClarZE5xr1wMNz8U8CDt4cpkf2Mdc2W9tMMg,1173 +authlib/oauth2/rfc6749/wrappers.py,sha256=Hge83mpl0zH-cbSSoyf4D2kXGS3c_K2degUx2S2LEqQ,842 +authlib/oauth2/rfc6750/__init__.py,sha256=NmL2KnczR-sddyzk3lZAnFT1Aj5nJ21B77W05gMLoVU,635 +authlib/oauth2/rfc6750/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc6750/__pycache__/errors.cpython-312.pyc,, +authlib/oauth2/rfc6750/__pycache__/parameters.cpython-312.pyc,, +authlib/oauth2/rfc6750/__pycache__/token.cpython-312.pyc,, +authlib/oauth2/rfc6750/__pycache__/validator.cpython-312.pyc,, +authlib/oauth2/rfc6750/errors.py,sha256=8jmPrtinFNL3qkxuTlKWscsWmVjdUhYPNWYOZXes90g,2827 +authlib/oauth2/rfc6750/parameters.py,sha256=-9xyt87e5uHFcjvibmYZN-CIEvxy105Hzt4VaHJ6ICI,1204 +authlib/oauth2/rfc6750/token.py,sha256=5ciVR8G3AR0xqCDPTHP858UViaE_qhA5rKkjk0vcA0E,3350 +authlib/oauth2/rfc6750/validator.py,sha256=Nb1Tg_uz5ZBWRZqzbS0Ag3_zq0GlznA0YkTGSP7TBc8,1377 +authlib/oauth2/rfc7009/__init__.py,sha256=kXO-Miq9N7fFStVJp6uqRgbleitHc_DJII5QyDCvQ10,353 +authlib/oauth2/rfc7009/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7009/__pycache__/parameters.cpython-312.pyc,, +authlib/oauth2/rfc7009/__pycache__/revocation.cpython-312.pyc,, +authlib/oauth2/rfc7009/parameters.py,sha256=klTaHudte5Oncfw6M5Mr6LL9Y-cVnN29Th1ix-uVYAU,854 +authlib/oauth2/rfc7009/revocation.py,sha256=m6Z7ActdJuvtmcLepLyRM-ZclA-5v7YSLjpRsxL92l4,4118 +authlib/oauth2/rfc7521/__init__.py,sha256=sI-EfGOAZTLM-LxfBfYEXU_74cd3ib63g8fkGu39PJA,67 +authlib/oauth2/rfc7521/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7521/__pycache__/client.cpython-312.pyc,, +authlib/oauth2/rfc7521/client.py,sha256=pbXyTl1e0s9u3EBrSz4un_QtRpUR3kQ8IDhMOKkrNAs,2723 +authlib/oauth2/rfc7523/__init__.py,sha256=0vllgNHhuHqWawuXy7vKtBT7wjHrn3YRA6ftmAkKOkA,852 +authlib/oauth2/rfc7523/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/assertion.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/auth.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/client.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/jwt_bearer.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/token.cpython-312.pyc,, +authlib/oauth2/rfc7523/__pycache__/validator.cpython-312.pyc,, 
+authlib/oauth2/rfc7523/assertion.py,sha256=ZFklvOm9ulLdUCoWn245igaK4LjXsFIJn5GcWJRl7iU,2024 +authlib/oauth2/rfc7523/auth.py,sha256=owAbFXkftxI79uPjchtfsFmvhUAquyA02McmaovVrNw,3346 +authlib/oauth2/rfc7523/client.py,sha256=dfleCVFY3VXJk6OKXVZBXKSVUY4817jqtD6qWA6nO9I,4388 +authlib/oauth2/rfc7523/jwt_bearer.py,sha256=e-G12oauDZiz8QTWrHXIXGuLcOpXgqZTrw5y3jjlGoQ,6532 +authlib/oauth2/rfc7523/token.py,sha256=tSMhEJs8doVZbAFvX1RSIX0iAR4xemQo6umWxoC5r4U,3325 +authlib/oauth2/rfc7523/validator.py,sha256=VXx11aa1nF3cp-fdsbQSkx0oXchiArhCtIGdnK-A9Ks,1602 +authlib/oauth2/rfc7591/__init__.py,sha256=Wb4PkmuGy8makvgceRVV1472cGL1udCS2bD9SGbqJyg,667 +authlib/oauth2/rfc7591/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7591/__pycache__/claims.cpython-312.pyc,, +authlib/oauth2/rfc7591/__pycache__/endpoint.cpython-312.pyc,, +authlib/oauth2/rfc7591/__pycache__/errors.cpython-312.pyc,, +authlib/oauth2/rfc7591/claims.py,sha256=4eMNWjY0Osh4IvdxAZE-bHXf0gqfk0VoERxtkSL07AQ,9809 +authlib/oauth2/rfc7591/endpoint.py,sha256=3EYBDrBhp8G7iK12hBZBns_95yDmLeXzEiP5I1JRW6g,8104 +authlib/oauth2/rfc7591/errors.py,sha256=nx_1-wyT1kznHXh1R91uwa1gbRJ5YKUxWY1SiFVRTWg,1098 +authlib/oauth2/rfc7592/__init__.py,sha256=4du9AjzDNRXbmtkTWBPbpXz5ivugseKIuXtp3oAsk4w,315 +authlib/oauth2/rfc7592/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7592/__pycache__/endpoint.cpython-312.pyc,, +authlib/oauth2/rfc7592/endpoint.py,sha256=7VEdi04ZnpaUICoZl8YuP1Esp8YhlebaJBhOjR43z5c,10167 +authlib/oauth2/rfc7636/__init__.py,sha256=U-568f2yopL7jOGbL-6js5AMm9mSu74PgK21AkmfAN8,340 +authlib/oauth2/rfc7636/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7636/__pycache__/challenge.cpython-312.pyc,, +authlib/oauth2/rfc7636/challenge.py,sha256=GNH53aDsI6yDoo98wKtNvh7k0YWACCLfChwbrCHt76E,5665 +authlib/oauth2/rfc7662/__init__.py,sha256=buYHq9DwjRGLIENJkDJ0-9VNyo_8Sui--5C3WyFc6O4,423 +authlib/oauth2/rfc7662/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc7662/__pycache__/introspection.cpython-312.pyc,, +authlib/oauth2/rfc7662/__pycache__/models.cpython-312.pyc,, +authlib/oauth2/rfc7662/__pycache__/token_validator.cpython-312.pyc,, +authlib/oauth2/rfc7662/introspection.py,sha256=BZ5ET90mL22R3wp5OobUnAXwzrbTFJkqcyZ8uuixPqQ,5244 +authlib/oauth2/rfc7662/models.py,sha256=vlmy-_UJmzy4rFjOjLsKpybpodoMsxt4FRi-URDoCKQ,868 +authlib/oauth2/rfc7662/token_validator.py,sha256=x7s4funp-7edBTWs64hsTkxZLxf5O8yPKkDdKPwMMWA,1339 +authlib/oauth2/rfc8414/__init__.py,sha256=ilEoqCBQ-lqXM7RBVlqAzbHbW39d_5oTKbKzJU4bfIY,361 +authlib/oauth2/rfc8414/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc8414/__pycache__/models.cpython-312.pyc,, +authlib/oauth2/rfc8414/__pycache__/well_known.cpython-312.pyc,, +authlib/oauth2/rfc8414/models.py,sha256=-ajtcMT5OleAEwnUNzvRGVdpDEDJ2nfhXn9WuLPNwpE,17450 +authlib/oauth2/rfc8414/well_known.py,sha256=-2eObCyYbBJHBokeBfuVHLqL2gvaLOK-aOVcy4M-5i8,727 +authlib/oauth2/rfc8628/__init__.py,sha256=TqyFJ_dAH5XjFdHFzxuiYrfpvX8rR0UiCyDoEPXPhhc,678 +authlib/oauth2/rfc8628/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc8628/__pycache__/device_code.cpython-312.pyc,, +authlib/oauth2/rfc8628/__pycache__/endpoint.cpython-312.pyc,, +authlib/oauth2/rfc8628/__pycache__/errors.cpython-312.pyc,, +authlib/oauth2/rfc8628/__pycache__/models.cpython-312.pyc,, +authlib/oauth2/rfc8628/device_code.py,sha256=drtONNm2ZNzqNN5Ts93GEE68RaTVqhJFJPUnb16STT0,7715 +authlib/oauth2/rfc8628/endpoint.py,sha256=bA4U9n7I6rt1FblM5eVTx3jhfbJ0-7ptCD9kUV_wLuo,7107 
+authlib/oauth2/rfc8628/errors.py,sha256=c761U-GWU58bmfyEPw1VveaOkmOgXElBVlWaE-tXMTg,919 +authlib/oauth2/rfc8628/models.py,sha256=o5734crJs0HZ8d_Iyqoh5O3i_D4_Pf2nml-HU62Re80,827 +authlib/oauth2/rfc8693/__init__.py,sha256=Omkb_UdGC1dsrqP8SUD1f562SnEyOs_pd5G03pouMrE,182 +authlib/oauth2/rfc8693/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc9068/__init__.py,sha256=XWpuGChgg0Rvyy7ZZ3c26q5LiTcQQbZboJknSCpKg4s,332 +authlib/oauth2/rfc9068/__pycache__/__init__.cpython-312.pyc,, +authlib/oauth2/rfc9068/__pycache__/claims.cpython-312.pyc,, +authlib/oauth2/rfc9068/__pycache__/introspection.cpython-312.pyc,, +authlib/oauth2/rfc9068/__pycache__/revocation.cpython-312.pyc,, +authlib/oauth2/rfc9068/__pycache__/token.cpython-312.pyc,, +authlib/oauth2/rfc9068/__pycache__/token_validator.cpython-312.pyc,, +authlib/oauth2/rfc9068/claims.py,sha256=omSrkPIBtOwPGmgIl8ERc8IAk9bzAewoQw9fkvDZauE,1866 +authlib/oauth2/rfc9068/introspection.py,sha256=rJ7QfzYM0ynQFkzYJ-icEByeaBzX1AcgxgLyQGwwOt8,4251 +authlib/oauth2/rfc9068/revocation.py,sha256=vqwfdE5sWEL2bJUrjoYVLllkFaAOt_BuSxU9UxQFnPs,2521 +authlib/oauth2/rfc9068/token.py,sha256=LZgwzqRtjiaZv-EKwUSl07IM2v7XfvgTioTmfwgqFoE,8639 +authlib/oauth2/rfc9068/token_validator.py,sha256=I8ZWuEGMByR0KV4KxUnr-iLHTITBvCf9lfzzpgh_Dbk,6890 +authlib/oidc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/oidc/__pycache__/__init__.cpython-312.pyc,, +authlib/oidc/core/__init__.py,sha256=NX_zhew3om9I0qVozcXyHshqqCqbk2V1qhiHO8Q4Ztk,650 +authlib/oidc/core/__pycache__/__init__.cpython-312.pyc,, +authlib/oidc/core/__pycache__/claims.cpython-312.pyc,, +authlib/oidc/core/__pycache__/errors.cpython-312.pyc,, +authlib/oidc/core/__pycache__/models.cpython-312.pyc,, +authlib/oidc/core/__pycache__/util.cpython-312.pyc,, +authlib/oidc/core/claims.py,sha256=AvHWg9GPMjQCLDS8uKtOxlXi_prz-fyWGJAF34vD5w0,10187 +authlib/oidc/core/errors.py,sha256=2nZdVIlTweNwXapWa7QodgCR0_5neGlCbGB2U4xUT6o,2883 +authlib/oidc/core/grants/__init__.py,sha256=E6KAaqJMyRYWD2635gdcfE-oN_RRaIAL_imHATP12Uo,226 +authlib/oidc/core/grants/__pycache__/__init__.cpython-312.pyc,, +authlib/oidc/core/grants/__pycache__/code.cpython-312.pyc,, +authlib/oidc/core/grants/__pycache__/hybrid.cpython-312.pyc,, +authlib/oidc/core/grants/__pycache__/implicit.cpython-312.pyc,, +authlib/oidc/core/grants/__pycache__/util.cpython-312.pyc,, +authlib/oidc/core/grants/code.py,sha256=Jl7Asl_PzMPHHaileADIkSn8cODbYCLSC14F4zkUy4U,4848 +authlib/oidc/core/grants/hybrid.py,sha256=t20y1cX83vX-fgHL0v8JOtrjcs1pbe4zhMRnYarHKFM,3362 +authlib/oidc/core/grants/implicit.py,sha256=wzpMiiC0z31dvSrZVrZThBL5WsTWowvlu2MN5JI97dM,5288 +authlib/oidc/core/grants/util.py,sha256=0Zmyd_ofSrzAyJYf7dxtuQ7wcx2shkha4aDIIIeo-F8,4122 +authlib/oidc/core/models.py,sha256=eYGC-NcIuu1m50h5o1bJFwUjwGF9l-AytTic8B07zmU,413 +authlib/oidc/core/util.py,sha256=LggTWoq-qHvF5JW6MsMxBa76rxVlxCARIlQXkxkZD_o,382 +authlib/oidc/discovery/__init__.py,sha256=sI40mT-HXoRPqxAE5UfPg-r3xa_wrQZ8jIuN5n5nm70,321 +authlib/oidc/discovery/__pycache__/__init__.cpython-312.pyc,, +authlib/oidc/discovery/__pycache__/models.cpython-312.pyc,, +authlib/oidc/discovery/__pycache__/well_known.cpython-312.pyc,, +authlib/oidc/discovery/models.py,sha256=dNpNiDyss3T-6FGkoWe_t0_D2xTHSppd4wMY_BsoQNg,12575 +authlib/oidc/discovery/well_known.py,sha256=3VmfTsVReChVVxBuN1eoL7fIRHL0-tN0aXH2XVKeiDM,574 diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/WHEEL new file mode 100644 index 0000000..84237ed --- /dev/null 
+++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (73.0.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/top_level.txt
new file mode 100644
index 0000000..b91e7e4
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/Authlib-1.3.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+authlib
diff --git a/myenv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc b/myenv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc
new file mode 100644
index 0000000..95f344f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc
new file mode 100644
index 0000000..0cc378d
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so b/myenv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 0000000..bddc5d1
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so differ
diff --git a/myenv/lib/python3.12/site-packages/_distutils_hack/__init__.py b/myenv/lib/python3.12/site-packages/_distutils_hack/__init__.py
new file mode 100644
index 0000000..30ac3a7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/_distutils_hack/__init__.py
@@ -0,0 +1,240 @@
+# don't import any costly modules
+import os
+import sys
+
+report_url = (
+    "https://github.com/pypa/setuptools/issues/new?"
+    "template=distutils-deprecation.yml"
+)
+
+
+def warn_distutils_present():
+    if 'distutils' not in sys.modules:
+        return
+    import warnings
+
+    warnings.warn(
+        "Distutils was imported before Setuptools, but importing Setuptools "
+        "also replaces the `distutils` module in `sys.modules`. This may lead "
+        "to undesirable behaviors or errors. To avoid these issues, avoid "
+        "using distutils directly, ensure that setuptools is installed in the "
+        "traditional way (e.g. not an editable install), and/or make sure "
+        "that setuptools is always imported before distutils."
+    )
+
+
+def clear_distutils():
+    if 'distutils' not in sys.modules:
+        return
+    import warnings
+
+    warnings.warn(
+        "Setuptools is replacing distutils. Support for replacing "
+        "an already imported distutils is deprecated. In the future, "
+        "this condition will fail. "
+        f"Register concerns at {report_url}"
+    )
+    mods = [
+        name
+        for name in sys.modules
+        if name == "distutils" or name.startswith("distutils.")
+    ]
+    for name in mods:
+        del sys.modules[name]
+
+
+def enabled():
+    """
+    Allow selection of distutils by environment variable.
+    """
+    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+    if which == 'stdlib':
+        import warnings
+
+        warnings.warn(
+            "Reliance on distutils from stdlib is deprecated. Users "
+            "must rely on setuptools to provide the distutils module. "
+            "Avoid importing distutils or import setuptools first, "
+            "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. "
+            f"Register concerns at {report_url}"
+        )
+    return which == 'local'
+
+
+def ensure_local_distutils():
+    import importlib
+
+    clear_distutils()
+
+    # With the DistutilsMetaFinder in place,
+    # perform an import to cause distutils to be
+    # loaded from setuptools._distutils. Ref #2906.
+    with shim():
+        importlib.import_module('distutils')
+
+    # check that submodules load as expected
+    core = importlib.import_module('distutils.core')
+    assert '_distutils' in core.__file__, core.__file__
+    assert 'setuptools._distutils.log' not in sys.modules
+
+
+def do_override():
+    """
+    Ensure that the local copy of distutils is preferred over stdlib.
+
+    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
+    for more motivation.
+    """
+    if enabled():
+        warn_distutils_present()
+        ensure_local_distutils()
+
+
+class _TrivialRe:
+    def __init__(self, *patterns):
+        self._patterns = patterns
+
+    def match(self, string):
+        return all(pat in string for pat in self._patterns)
+
+
+class DistutilsMetaFinder:
+    def find_spec(self, fullname, path, target=None):
+        # optimization: only consider top level modules and those
+        # found in the CPython test suite.
+        if path is not None and not fullname.startswith('test.'):
+            return None
+
+        method_name = 'spec_for_{fullname}'.format(**locals())
+        method = getattr(self, method_name, lambda: None)
+        return method()
+
+    def spec_for_distutils(self):
+        if self.is_cpython():
+            return None
+
+        import importlib
+        import importlib.abc
+        import importlib.util
+
+        try:
+            mod = importlib.import_module('setuptools._distutils')
+        except Exception:
+            # There are a couple of cases where setuptools._distutils
+            # may not be present:
+            # - An older Setuptools without a local distutils is
+            #   taking precedence. Ref #2957.
+            # - Path manipulation during sitecustomize removes
+            #   setuptools from the path but only after the hook
+            #   has been loaded. Ref #2980.
+            # In either case, fall back to stdlib behavior.
+            return None
+
+        class DistutilsLoader(importlib.abc.Loader):
+            def create_module(self, spec):
+                mod.__name__ = 'distutils'
+                return mod
+
+            def exec_module(self, module):
+                pass
+
+        return importlib.util.spec_from_loader(
+            'distutils', DistutilsLoader(), origin=mod.__file__
+        )
+
+    @staticmethod
+    def is_cpython():
+        """
+        Suppress supplying distutils for CPython (build and tests).
+        Ref #2965 and #3007.
+        """
+        return os.path.isfile('pybuilddir.txt')
+
+    def spec_for_pip(self):
+        """
+        Ensure stdlib distutils when running under pip.
+        See pypa/pip#8761 for rationale.
+        """
+        if sys.version_info >= (3, 12) or self.pip_imported_during_build():
+            return
+        clear_distutils()
+        self.spec_for_distutils = lambda: None
+
+    @classmethod
+    def pip_imported_during_build(cls):
+        """
+        Detect if pip is being imported in a build script. Ref #2355.
+        """
+        import traceback
+
+        return any(
+            cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
+        )
+
+    @staticmethod
+    def frame_file_is_setup(frame):
+        """
+        Return True if the indicated frame suggests a setup.py file.
+        """
+        # some frames may not have __file__ (#2940)
+        return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+    def spec_for_sensitive_tests(self):
+        """
+        Ensure stdlib distutils when running select tests under CPython.
+
+        python/cpython#91169
+        """
+        clear_distutils()
+        self.spec_for_distutils = lambda: None
+
+    sensitive_tests = (
+        [
+            'test.test_distutils',
+            'test.test_peg_generator',
+            'test.test_importlib',
+        ]
+        if sys.version_info < (3, 10)
+        else [
+            'test.test_distutils',
+        ]
+    )
+
+
+for name in DistutilsMetaFinder.sensitive_tests:
+    setattr(
+        DistutilsMetaFinder,
+        f'spec_for_{name}',
+        DistutilsMetaFinder.spec_for_sensitive_tests,
+    )
+
+
+DISTUTILS_FINDER = DistutilsMetaFinder()
+
+
+def add_shim():
+    DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+    def __enter__(self) -> None:
+        insert_shim()
+
+    def __exit__(self, exc: object, value: object, tb: object) -> None:
+        _remove_shim()
+
+
+def insert_shim():
+    sys.meta_path.insert(0, DISTUTILS_FINDER)
+
+
+def _remove_shim():
+    try:
+        sys.meta_path.remove(DISTUTILS_FINDER)
+    except ValueError:
+        pass
+
+
+if sys.version_info < (3, 12):
+    # DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632)
+    remove_shim = _remove_shim
diff --git a/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..101b4a1
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc b/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc
new file mode 100644
index 0000000..bf083d4
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/_distutils_hack/__pycache__/override.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/_distutils_hack/override.py b/myenv/lib/python3.12/site-packages/_distutils_hack/override.py
new file mode 100644
index 0000000..2cc433a
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/_distutils_hack/override.py
@@ -0,0 +1 @@
+__import__('_distutils_hack').do_override()
diff --git a/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA
new file mode 100644
index 0000000..3ac05cf
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA
@@ -0,0 +1,295 @@
+Metadata-Version: 2.3
+Name: annotated-types
+Version: 0.7.0
+Summary: Reusable constraint types to use with typing.Annotated
+Project-URL: Homepage, https://github.com/annotated-types/annotated-types
+Project-URL: Source, https://github.com/annotated-types/annotated-types
+Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
+Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds
+License-File: LICENSE
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Console
+Classifier: Environment :: MacOS X
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX :: Linux
+Classifier:
Operating System :: Unix +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' +Description-Content-Type: text/markdown + +# annotated-types + +[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) +[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) +[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) + +[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of +adding context-specific metadata to existing types, and specifies that +`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special +logic for `x`. + +This package provides metadata objects which can be used to represent common +constraints such as upper and lower bounds on scalar values and collection sizes, +a `Predicate` marker for runtime checks, and +descriptions of how we intend these metadata to be interpreted. In some cases, +we also note alternative representations which do not require this package. + +## Install + +```bash +pip install annotated-types +``` + +## Examples + +```python +from typing import Annotated +from annotated_types import Gt, Len, Predicate + +# `is_prime` stands for any user-defined predicate function. +class MyClass: + age: Annotated[int, Gt(18)] # Valid: 19, 20, ... + # Invalid: 17, 18, "19", 19.0, ... + factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ... + # Invalid: 4, 8, -2, 5.0, "prime", ... + + my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50] + # Invalid: (1, 2), ["abc"], [0] * 20 +``` + +## Documentation + +_While `annotated-types` avoids runtime checks for performance, users should not +construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`. +Downstream implementors may choose to raise an error, emit a warning, silently ignore +a metadata item, etc., if the metadata objects described below are used with an +incompatible type - or for any other reason!_ + +### Gt, Ge, Lt, Le + +Express inclusive and/or exclusive bounds on orderable values - which may be numbers, +dates, times, strings, sets, etc. Note that the boundary value need not be of the +same type that was annotated, so long as the two can be compared: `Annotated[int, Gt(1.5)]` +is fine, for example, and implies that the value is an integer x such that `x > 1.5`. + +We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)` +as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on +the `annotated-types` package.
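+As a quick self-check of that equivalence (this snippet is our own illustration, not part of the package), note that `functools.partial(operator.lt, 1.5)` builds the predicate `1.5 < x`, which accepts and rejects exactly the same values as `Gt(1.5)`:
+
+```python
+import operator
+from functools import partial
+
+from annotated_types import Gt
+
+# partial(operator.lt, 1.5)(x) evaluates operator.lt(1.5, x), i.e. 1.5 < x,
+# which is the same constraint that Gt(1.5) describes.
+dependency_free = partial(operator.lt, 1.5)
+with_package = Gt(1.5)
+
+for x in (1, 1.5, 2):
+    assert dependency_free(x) == (x > with_package.gt)
+```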
+ +To be explicit, these types have the following meanings: + +* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum +* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum +* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum +* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum + +### Interval + +`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single +metadata object. `None` attributes should be ignored, and non-`None` attributes +treated as per the single bounds above. + +### MultipleOf + +`MultipleOf(multiple_of=x)` might be interpreted in two ways: + +1. Python semantics, implying `value % multiple_of == 0`, or +2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1), + where `int(value / multiple_of) == value / multiple_of`. + +We encourage users to be aware of these two common interpretations and their +distinct behaviours, especially since very large or non-integer numbers make +it easy to cause silent data corruption due to floating-point imprecision. + +We encourage libraries to carefully document which interpretation they implement. + +### MinLen, MaxLen, Len + +`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive. + +As well as `Len()` which can optionally include upper and lower bounds, we also +provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)` +and `Len(max_length=y)` respectively. + +`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`. + +Examples of usage: + +* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less +* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less +* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more +* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6 +* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8 + +#### Changed in v0.4.0 + +* `min_inclusive` has been renamed to `min_length`, no change in meaning +* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive** +* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic + meaning of the upper bound in slices vs. `Len` + +See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion. + +### Timezone + +`Timezone` can be used with a `datetime` or a `time` to express which timezones +are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime. +`Timezone(...)` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis)) +expresses that any timezone-aware datetime is allowed. You may also pass a specific +timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) +object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only +allow a specific timezone, though we note that this is often a symptom of fragile design. + +#### Changed in v0.x.x + +* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of + `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
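+To make the three forms concrete, here is a small sketch (our own, not from the package; the zoneinfo key and alias names are arbitrary) of how each marker is written:
+
+```python
+from datetime import datetime, timezone
+from typing import Annotated
+from zoneinfo import ZoneInfo  # Python 3.9+
+
+from annotated_types import Timezone
+
+NaiveDt = Annotated[datetime, Timezone(None)]        # tzinfo must be None
+AwareDt = Annotated[datetime, Timezone(...)]         # any tzinfo is acceptable
+UtcDt = Annotated[datetime, Timezone(timezone.utc)]  # exactly UTC
+LagosDt = Annotated[datetime, Timezone(ZoneInfo("Africa/Lagos"))]
+
+# annotated-types only carries this intent; enforcing it is left to a
+# downstream validator that inspects the Timezone metadata.
+```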
+ +### Unit + +`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of +a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]` +would be a float representing a velocity in meters per second. + +Please note that `annotated_types` itself makes no attempt to parse or validate +the unit string in any way. That is left entirely to downstream libraries, +such as [`pint`](https://pint.readthedocs.io) or +[`astropy.units`](https://docs.astropy.org/en/stable/units/). + +An example of how a library might use this metadata: + +```python +from annotated_types import Unit +from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args + +# given a type annotated with a unit: +Meters = Annotated[float, Unit("m")] + + +# you can cast the annotation to a specific unit type with any +# callable that accepts a string and returns the desired type +T = TypeVar("T") +def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None: + if get_origin(tp) is Annotated: + for arg in get_args(tp): + if isinstance(arg, Unit): + return unit_cls(arg.unit) + return None + + +# using `pint` +import pint +pint_unit = cast_unit(Meters, pint.Unit) + + +# using `astropy.units` +import astropy.units as u +astropy_unit = cast_unit(Meters, u.Unit) +``` + +### Predicate + +`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values. +Users should prefer the statically inspectable metadata above, but if you need +the full power and flexibility of arbitrary runtime predicates... here it is. + +For some common constraints, we provide generic types: + +* `IsLower = Annotated[T, Predicate(str.islower)]` +* `IsUpper = Annotated[T, Predicate(str.isupper)]` +* `IsDigit = Annotated[T, Predicate(str.isdigit)]` +* `IsFinite = Annotated[T, Predicate(math.isfinite)]` +* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]` +* `IsNan = Annotated[T, Predicate(math.isnan)]` +* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]` +* `IsInfinite = Annotated[T, Predicate(math.isinf)]` +* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]` + +so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer +(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`. + +Some libraries might have special logic to handle known or understandable predicates, +for example by checking for `str.isdigit` and using its presence to both call custom +logic to enforce digit-only strings, and customise some generated external schema. +Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in +favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`. + +To enable basic negation of commonly used predicates like `math.isnan` without +introducing indirection that would make it impossible for implementers to introspect +the predicate, we provide a `Not` wrapper that simply negates the predicate in an +introspectable manner. Several of the predicates listed above are created in this manner. + +We do not specify what behaviour should be expected for predicates that raise +an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently +skip invalid constraints, or statically raise an error; or it might try calling it +and then propagate or discard the resulting +`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object` +exception. We encourage libraries to document the behaviour they choose.
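+As an illustration of the consuming side (a hedged sketch of our own; `satisfies_predicates` is not an annotated-types API), a checker can walk the `Annotated` metadata and call each `Predicate` it finds, which also handles `Not`-wrapped predicates since `Not` is itself callable:
+
+```python
+import math
+from typing import Annotated, Any, get_args, get_origin
+
+from annotated_types import Not, Predicate
+
+
+def satisfies_predicates(tp: Any, value: Any) -> bool:
+    """Return True if value passes every Predicate in tp's Annotated metadata."""
+    if get_origin(tp) is not Annotated:
+        return True  # no metadata to check
+    _, *metadata = get_args(tp)  # first arg is the underlying type
+    return all(m.func(value) for m in metadata if isinstance(m, Predicate))
+
+
+Finite = Annotated[float, Predicate(math.isfinite)]
+NotNan = Annotated[float, Predicate(Not(math.isnan))]
+
+assert satisfies_predicates(Finite, 1.0)
+assert not satisfies_predicates(Finite, math.inf)
+assert satisfies_predicates(NotNan, 2.5)
+assert not satisfies_predicates(NotNan, math.nan)
+```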
+ +### Doc + +`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used. + +It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools. + +It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`. + +This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md). + +### Integrating downstream types with `GroupedMetadata` + +Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata. +This can help reduce verbosity and cognitive overhead for users. +For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata: + +```python +from dataclasses import dataclass +from typing import Iterator +from annotated_types import GroupedMetadata, Ge + +@dataclass +class Field(GroupedMetadata): + ge: int | None = None + description: str | None = None + + def __iter__(self) -> Iterator[object]: + # Iterating over a GroupedMetadata object should yield annotated-types + # constraint metadata objects which describe it as fully as possible, + # and may include other unknown objects too. + if self.ge is not None: + yield Ge(self.ge) + if self.description is not None: + # `Description` is a placeholder for a documentation metadata + # type of your own; annotated-types does not provide one. + yield Description(self.description) +``` + +Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently. + +Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself. + +Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`. + +### Consuming metadata + +We intend not to be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103). + +It is up to the implementer to determine how this metadata is used. +You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases. + +## Design & History + +This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic +and Hypothesis, with the goal of making it as easy as possible for end-users to +provide more informative annotations for use by runtime libraries. + +It is deliberately minimal, and following PEP-593 allows downstream libraries +considerable discretion in what (if anything!) they choose to support.
Nonetheless, we expect +that staying simple and covering _only_ the most common use-cases will give users +and maintainers the best experience we can offer. If you'd like more constraints for your +types - follow our lead, by defining them and documenting them downstream! diff --git a/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD new file mode 100644 index 0000000..a66e278 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 +annotated_types-0.7.0.dist-info/RECORD,, +annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 +annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 +annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 +annotated_types/__pycache__/__init__.cpython-312.pyc,, +annotated_types/__pycache__/test_cases.cpython-312.pyc,, +annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL new file mode 100644 index 0000000..516596c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..d99323a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 the contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
diff --git a/myenv/lib/python3.12/site-packages/annotated_types/__init__.py b/myenv/lib/python3.12/site-packages/annotated_types/__init__.py new file mode 100644 index 0000000..74e0dee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/annotated_types/__init__.py @@ -0,0 +1,432 @@ +import math +import sys +import types +from dataclasses import dataclass +from datetime import tzinfo +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union + +if sys.version_info < (3, 8): + from typing_extensions import Protocol, runtime_checkable +else: + from typing import Protocol, runtime_checkable + +if sys.version_info < (3, 9): + from typing_extensions import Annotated, Literal +else: + from typing import Annotated, Literal + +if sys.version_info < (3, 10): + EllipsisType = type(Ellipsis) + KW_ONLY = {} + SLOTS = {} +else: + from types import EllipsisType + + KW_ONLY = {"kw_only": True} + SLOTS = {"slots": True} + + +__all__ = ( + 'BaseMetadata', + 'GroupedMetadata', + 'Gt', + 'Ge', + 'Lt', + 'Le', + 'Interval', + 'MultipleOf', + 'MinLen', + 'MaxLen', + 'Len', + 'Timezone', + 'Predicate', + 'LowerCase', + 'UpperCase', + 'IsDigits', + 'IsFinite', + 'IsNotFinite', + 'IsNan', + 'IsNotNan', + 'IsInfinite', + 'IsNotInfinite', + 'doc', + 'DocInfo', + '__version__', +) + +__version__ = '0.7.0' + + +T = TypeVar('T') + + +# arguments that start with __ are considered +# positional only +# see https://peps.python.org/pep-0484/#positional-only-arguments + + +class SupportsGt(Protocol): + def __gt__(self: T, __other: T) -> bool: + ... + + +class SupportsGe(Protocol): + def __ge__(self: T, __other: T) -> bool: + ... + + +class SupportsLt(Protocol): + def __lt__(self: T, __other: T) -> bool: + ... + + +class SupportsLe(Protocol): + def __le__(self: T, __other: T) -> bool: + ... + + +class SupportsMod(Protocol): + def __mod__(self: T, __other: T) -> T: + ... + + +class SupportsDiv(Protocol): + def __div__(self: T, __other: T) -> T: + ... + + +class BaseMetadata: + """Base class for all metadata. + + This exists mainly so that implementers + can do `isinstance(..., BaseMetadata)` while traversing field annotations. + """ + + __slots__ = () + + +@dataclass(frozen=True, **SLOTS) +class Gt(BaseMetadata): + """Gt(gt=x) implies that the value must be greater than x. + + It can be used with any type that supports the ``>`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + gt: SupportsGt + + +@dataclass(frozen=True, **SLOTS) +class Ge(BaseMetadata): + """Ge(ge=x) implies that the value must be greater than or equal to x. + + It can be used with any type that supports the ``>=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + ge: SupportsGe + + +@dataclass(frozen=True, **SLOTS) +class Lt(BaseMetadata): + """Lt(lt=x) implies that the value must be less than x. + + It can be used with any type that supports the ``<`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + lt: SupportsLt + + +@dataclass(frozen=True, **SLOTS) +class Le(BaseMetadata): + """Le(le=x) implies that the value must be less than or equal to x. + + It can be used with any type that supports the ``<=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + le: SupportsLe + + +@runtime_checkable +class GroupedMetadata(Protocol): + """A grouping of multiple objects, like typing.Unpack. + + `GroupedMetadata` on its own is not metadata and has no meaning. 
+ All of the constraints and metadata should be fully expressible + in terms of the `BaseMetadata` objects returned by `GroupedMetadata.__iter__()`. + + Concrete implementations should override `GroupedMetadata.__iter__()` + to add their own metadata. + For example: + + >>> @dataclass + >>> class Field(GroupedMetadata): + >>> gt: float | None = None + >>> description: str | None = None + ... + >>> def __iter__(self) -> Iterable[object]: + >>> if self.gt is not None: + >>> yield Gt(self.gt) + >>> if self.description is not None: + >>> yield Description(self.description) + + Also see the implementation of `Interval` below for an example. + + Parsers should recognize this and unpack it so that it can be used + both with and without unpacking: + + - `Annotated[int, Field(...)]` (parser must unpack Field) + - `Annotated[int, *Field(...)]` (PEP-646) + """ # noqa: trailing-whitespace + + @property + def __is_annotated_types_grouped_metadata__(self) -> Literal[True]: + return True + + def __iter__(self) -> Iterator[object]: + ... + + if not TYPE_CHECKING: + __slots__ = () # allow subclasses to use slots + + def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: + # Basic ABC like functionality without the complexity of an ABC + super().__init_subclass__(*args, **kwargs) + if cls.__iter__ is GroupedMetadata.__iter__: + raise TypeError("Can't subclass GroupedMetadata without implementing __iter__") + + def __iter__(self) -> Iterator[object]: # noqa: F811 + raise NotImplementedError # more helpful than "None has no attribute..." type errors + + +@dataclass(frozen=True, **KW_ONLY, **SLOTS) +class Interval(GroupedMetadata): + """Interval can express inclusive or exclusive bounds with a single object. + + It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which + are interpreted the same way as the single-bound constraints. + """ + + gt: Union[SupportsGt, None] = None + ge: Union[SupportsGe, None] = None + lt: Union[SupportsLt, None] = None + le: Union[SupportsLe, None] = None + + def __iter__(self) -> Iterator[BaseMetadata]: + """Unpack an Interval into zero or more single-bounds.""" + if self.gt is not None: + yield Gt(self.gt) + if self.ge is not None: + yield Ge(self.ge) + if self.lt is not None: + yield Lt(self.lt) + if self.le is not None: + yield Le(self.le) + + +@dataclass(frozen=True, **SLOTS) +class MultipleOf(BaseMetadata): + """MultipleOf(multiple_of=x) might be interpreted in two ways: + + 1. Python semantics, implying ``value % multiple_of == 0``, or + 2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of`` + + We encourage users to be aware of these two common interpretations, + and libraries to carefully document which they implement. + """ + + multiple_of: Union[SupportsDiv, SupportsMod] + + +@dataclass(frozen=True, **SLOTS) +class MinLen(BaseMetadata): + """ + MinLen() implies minimum inclusive length, + e.g. ``len(value) >= min_length``. + """ + + min_length: Annotated[int, Ge(0)] + + +@dataclass(frozen=True, **SLOTS) +class MaxLen(BaseMetadata): + """ + MaxLen() implies maximum inclusive length, + e.g. ``len(value) <= max_length``. + """ + + max_length: Annotated[int, Ge(0)] + + +@dataclass(frozen=True, **SLOTS) +class Len(GroupedMetadata): + """ + Len() implies that ``min_length <= len(value) <= max_length``. + + Upper bound may be omitted or ``None`` to indicate no upper length bound.
+ """ + + min_length: Annotated[int, Ge(0)] = 0 + max_length: Optional[Annotated[int, Ge(0)]] = None + + def __iter__(self) -> Iterator[BaseMetadata]: + """Unpack a Len into zone or more single-bounds.""" + if self.min_length > 0: + yield MinLen(self.min_length) + if self.max_length is not None: + yield MaxLen(self.max_length) + + +@dataclass(frozen=True, **SLOTS) +class Timezone(BaseMetadata): + """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive). + + ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. + ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be + tz-aware but any timezone is allowed. + + You may also pass a specific timezone string or tzinfo object such as + ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that + you only allow a specific timezone, though we note that this is often + a symptom of poor design. + """ + + tz: Union[str, tzinfo, EllipsisType, None] + + +@dataclass(frozen=True, **SLOTS) +class Unit(BaseMetadata): + """Indicates that the value is a physical quantity with the specified unit. + + It is intended for usage with numeric types, where the value represents the + magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]`` + or ``speed: Annotated[float, Unit('m/s')]``. + + Interpretation of the unit string is left to the discretion of the consumer. + It is suggested to follow conventions established by python libraries that work + with physical quantities, such as + + - ``pint`` : + - ``astropy.units``: + + For indicating a quantity with a certain dimensionality but without a specific unit + it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`. + Note, however, ``annotated_types`` itself makes no use of the unit string. + """ + + unit: str + + +@dataclass(frozen=True, **SLOTS) +class Predicate(BaseMetadata): + """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. + + Users should prefer statically inspectable metadata, but if you need the full + power and flexibility of arbitrary runtime predicates... here it is. + + We provide a few predefined predicates for common string constraints: + ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and + ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which + can be given special handling, and avoid indirection like ``lambda s: s.lower()``. + + Some libraries might have special logic to handle certain predicates, e.g. by + checking for `str.isdigit` and using its presence to both call custom logic to + enforce digit-only strings, and customise some generated external schema. + + We do not specify what behaviour should be expected for predicates that raise + an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently + skip invalid constraints, or statically raise an error; or it might try calling it + and then propagate or discard the resulting exception. 
+ """ + + func: Callable[[Any], bool] + + def __repr__(self) -> str: + if getattr(self.func, "__name__", "") == "": + return f"{self.__class__.__name__}({self.func!r})" + if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and ( + namespace := getattr(self.func.__self__, "__name__", None) + ): + return f"{self.__class__.__name__}({namespace}.{self.func.__name__})" + if isinstance(self.func, type(str.isascii)): # method descriptor + return f"{self.__class__.__name__}({self.func.__qualname__})" + return f"{self.__class__.__name__}({self.func.__name__})" + + +@dataclass +class Not: + func: Callable[[Any], bool] + + def __call__(self, __v: Any) -> bool: + return not self.func(__v) + + +_StrType = TypeVar("_StrType", bound=str) + +LowerCase = Annotated[_StrType, Predicate(str.islower)] +""" +Return True if the string is a lowercase string, False otherwise. + +A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string. +""" # noqa: E501 +UpperCase = Annotated[_StrType, Predicate(str.isupper)] +""" +Return True if the string is an uppercase string, False otherwise. + +A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string. +""" # noqa: E501 +IsDigit = Annotated[_StrType, Predicate(str.isdigit)] +IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63 +""" +Return True if the string is a digit string, False otherwise. + +A string is a digit string if all characters in the string are digits and there is at least one character in the string. +""" # noqa: E501 +IsAscii = Annotated[_StrType, Predicate(str.isascii)] +""" +Return True if all characters in the string are ASCII, False otherwise. + +ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too. +""" + +_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex]) +IsFinite = Annotated[_NumericType, Predicate(math.isfinite)] +"""Return True if x is neither an infinity nor a NaN, and False otherwise.""" +IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))] +"""Return True if x is one of infinity or NaN, and False otherwise""" +IsNan = Annotated[_NumericType, Predicate(math.isnan)] +"""Return True if x is a NaN (not a number), and False otherwise.""" +IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))] +"""Return True if x is anything but NaN (not a number), and False otherwise.""" +IsInfinite = Annotated[_NumericType, Predicate(math.isinf)] +"""Return True if x is a positive or negative infinity, and False otherwise.""" +IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))] +"""Return True if x is neither a positive or negative infinity, and False otherwise.""" + +try: + from typing_extensions import DocInfo, doc # type: ignore [attr-defined] +except ImportError: + + @dataclass(frozen=True, **SLOTS) + class DocInfo: # type: ignore [no-redef] + """ " + The return value of doc(), mainly to be used by tools that want to extract the + Annotated documentation at runtime. + """ + + documentation: str + """The documentation string passed to doc().""" + + def doc( + documentation: str, + ) -> DocInfo: + """ + Add documentation to a type annotation inside of Annotated. + + For example: + + >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ... 
+ """ + return DocInfo(documentation) diff --git a/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4e0726e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc b/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc new file mode 100644 index 0000000..f471080 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/annotated_types/py.typed b/myenv/lib/python3.12/site-packages/annotated_types/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/annotated_types/test_cases.py b/myenv/lib/python3.12/site-packages/annotated_types/test_cases.py new file mode 100644 index 0000000..d9164d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/annotated_types/test_cases.py @@ -0,0 +1,151 @@ +import math +import sys +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple + +if sys.version_info < (3, 9): + from typing_extensions import Annotated +else: + from typing import Annotated + +import annotated_types as at + + +class Case(NamedTuple): + """ + A test case for `annotated_types`. + """ + + annotation: Any + valid_cases: Iterable[Any] + invalid_cases: Iterable[Any] + + +def cases() -> Iterable[Case]: + # Gt, Ge, Lt, Le + yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Gt(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(date(2000, 1, 1))], + [date(2000, 1, 2), date(2000, 1, 3)], + [date(2000, 1, 1), date(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(Decimal('1.123'))], + [Decimal('1.1231'), Decimal('123')], + [Decimal('1.123'), Decimal('0')], + ) + + yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) + yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Ge(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(1998, 1, 1), datetime(1999, 12, 31)], + ) + + yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) + yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Lt(datetime(2000, 1, 1))], + [datetime(1999, 12, 31), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) + yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Le(datetime(2000, 1, 1))], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + # Interval + yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) + yield 
Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) + yield Case( + Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(2000, 1, 4)], + ) + + yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) + yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) + + # lengths + + yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) + yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) + + yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + + yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) + yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) + + yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) + yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) + yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) + + # Timezone + + yield Case( + Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] + ) + yield Case( + Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] + ) + yield Case( + Annotated[datetime, at.Timezone(timezone.utc)], + [datetime(2000, 1, 1, tzinfo=timezone.utc)], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + yield Case( + Annotated[datetime, at.Timezone('Europe/London')], + [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + + # Quantity + + yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m')) + + # predicate types + + yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) + yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) + yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2']) + yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) + + yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) + + yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) + yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) + yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) + yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) + yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) + yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) + + # check stacked predicates + yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) + + # doc + yield Case(Annotated[int, at.doc("A number")], [1, 2], []) + + # custom GroupedMetadata 
+ class MyCustomGroupedMetadata(at.GroupedMetadata): + def __iter__(self) -> Iterator[at.Predicate]: + yield at.Predicate(lambda x: float(x).is_integer()) + + yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/LICENSE b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/METADATA b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/METADATA new file mode 100644 index 0000000..be13c8a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 4.4.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author-email: Alex Grönholm +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: idna >=2.8 +Requires-Dist: sniffio >=1.1 +Requires-Dist: exceptiongroup >=1.0.2 ; python_version < "3.11" +Requires-Dist: typing-extensions >=4.1 ; python_version < "3.11" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: Sphinx >=7 ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc' +Provides-Extra: test +Requires-Dist: anyio[trio] ; extra == 'test' +Requires-Dist: coverage[toml] >=7 ; extra == 'test' +Requires-Dist: exceptiongroup >=1.2.0 ; extra == 'test' +Requires-Dist: hypothesis >=4.0 ; extra == 'test' +Requires-Dist: psutil >=5.9 ; extra == 'test' +Requires-Dist: pytest >=7.0 ; extra == 'test' +Requires-Dist: pytest-mock >=3.6.1 ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: uvloop >=0.17 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio >=0.23 ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. 
AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _Hypothesis: https://hypothesis.works/ diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/RECORD b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/RECORD new file mode 100644 index 0000000..6ecc522 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.4.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.4.0.dist-info/METADATA,sha256=sbJaOJ_Ilka4D0U6yKtfOtVrYef7XRFzGjoBEZnRpes,4599 +anyio-4.4.0.dist-info/RECORD,, +anyio-4.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +anyio-4.4.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.4.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=CxUxIHOIONI3KpsDLCg-dI6lQaDkW_4Zhtu5jWt1XO8,4344 +anyio/__pycache__/__init__.cpython-312.pyc,, +anyio/__pycache__/from_thread.cpython-312.pyc,, +anyio/__pycache__/lowlevel.cpython-312.pyc,, +anyio/__pycache__/pytest_plugin.cpython-312.pyc,, +anyio/__pycache__/to_process.cpython-312.pyc,, +anyio/__pycache__/to_thread.cpython-312.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-312.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,, +anyio/_backends/__pycache__/_trio.cpython-312.pyc,, +anyio/_backends/_asyncio.py,sha256=CVy87WpTh1URmEjlE-AKTrBoPwsqH_nRxbGkLnTrfeg,83244 +anyio/_backends/_trio.py,sha256=8gdA930WJFn4xrMNpMH6LrHC9MeyTdZBHsB_W-8HFBw,35909 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-312.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-312.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-312.pyc,, +anyio/_core/__pycache__/_fileio.cpython-312.pyc,, +anyio/_core/__pycache__/_resources.cpython-312.pyc,, +anyio/_core/__pycache__/_signals.cpython-312.pyc,, 
+anyio/_core/__pycache__/_sockets.cpython-312.pyc,, +anyio/_core/__pycache__/_streams.cpython-312.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-312.pyc,, +anyio/_core/__pycache__/_tasks.cpython-312.pyc,, +anyio/_core/__pycache__/_testing.cpython-312.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-312.pyc,, +anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695 +anyio/_core/_exceptions.py,sha256=wUmhDu80qEB7z9EdCqUwVEhNUlNEok4_W2-rC6sCAUQ,2078 +anyio/_core/_fileio.py,sha256=fC6H6DcueA-2AUaDkP91kmVeqoU7DlWj6O0CCAdnsdM,19456 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=rDOVxtugZDgC5AhfW3lrwsre2n9Pj_adoRUidBiF6dA,878 +anyio/_core/_sockets.py,sha256=2jOzi4bXQQYTLr9PSrCaeTgwaU_N7mt0yjHGmX4LvA8,24028 +anyio/_core/_streams.py,sha256=Z8ZlTY6xom5EszrMsgCT3TphiT4JIlQG-y33CrD0NQY,1811 +anyio/_core/_subprocesses.py,sha256=ZLLNXAtlRGfbyC4sOIltYB1k3NJa3tqk_x_Fsnbcs1M,5272 +anyio/_core/_synchronization.py,sha256=h3o6dWWbzVrcNmi7i2mQjEgRtnIxkGtjmYK7KMpdlaE,18444 +anyio/_core/_tasks.py,sha256=pvVEX2Fw159sf0ypAPerukKsZgRRwvFFedVW52nR2Vk,4764 +anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 +anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 +anyio/abc/__init__.py,sha256=U44_s3BglL8BojWQiq0KuokvCqkunIp-ySH3GyRXxAc,2681 +anyio/abc/__pycache__/__init__.cpython-312.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-312.pyc,, +anyio/abc/__pycache__/_resources.cpython-312.pyc,, +anyio/abc/__pycache__/_sockets.cpython-312.pyc,, +anyio/abc/__pycache__/_streams.cpython-312.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/abc/__pycache__/_tasks.cpython-312.pyc,, +anyio/abc/__pycache__/_testing.cpython-312.pyc,, +anyio/abc/_eventloop.py,sha256=r9pldSu6p-ZsvO6D_brc0EIi1JgZRDbfgVLuy7Q7R6o,10085 +anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 +anyio/abc/_sockets.py,sha256=XdZ42TQ1omZN9Ec3HUfTMWG_i-21yMjXQ_FFslAZtzQ,6269 +anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=0Jc6oIwUjMIVReehF6knOZyAqlgwDt4TP1NQkx4IQGw,2731 +anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 +anyio/from_thread.py,sha256=HtgJ7yZ6RLfRe0l0yyyhpg2mnoax0mqXXgKv8TORUlA,17700 +anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=TBgRAfT-Oxy6efhO1Tziq54NND3Jy4dRmwkMmQXSvhI,5386 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-312.pyc,, +anyio/streams/__pycache__/buffered.cpython-312.pyc,, +anyio/streams/__pycache__/file.cpython-312.pyc,, +anyio/streams/__pycache__/memory.cpython-312.pyc,, +anyio/streams/__pycache__/stapled.cpython-312.pyc,, +anyio/streams/__pycache__/text.cpython-312.pyc,, +anyio/streams/__pycache__/tls.cpython-312.pyc,, +anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 +anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 +anyio/streams/memory.py,sha256=Y286x16omNSSGONQx5CBLLNiB3vAJb_vVKt5vb3go-Q,10190 +anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 
+anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 +anyio/streams/tls.py,sha256=ev-6yNOGcIkziIkcIfKj8VmLqQJW-iDBJttaKgKDsF4,12752 +anyio/to_process.py,sha256=lx_bt0CUJsS1eSlraw662OpCjRgGXowoyf1Q-i-kOxo,9535 +anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/WHEEL new file mode 100644 index 0000000..bab98d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/entry_points.txt b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio-4.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/myenv/lib/python3.12/site-packages/anyio/__init__.py b/myenv/lib/python3.12/site-packages/anyio/__init__.py new file mode 100644 index 0000000..7bfe231 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/__init__.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from typing import Any + +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets 
import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio."): + value.__module__ = __name__ diff --git a/myenv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5170f5a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc new file mode 100644 index 0000000..122e5ba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc new file mode 100644 index 0000000..c16c916 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc new file mode 100644 index 0000000..f0c750f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc new file mode 100644 index 0000000..595034e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc new file mode 100644 index 0000000..7fdf71f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/myenv/lib/python3.12/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..52ad26b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc new file mode 100644 index 0000000..8bbeb39 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc new file mode 100644 index 0000000..b8e816c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/myenv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..43b7cb0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2511 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +import threading +import weakref +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import AsyncIterator, Generator, Iterable +from concurrent.futures import Future +from contextlib import suppress +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing 
import ( + IO, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Mapping, + Optional, + Sequence, + Tuple, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[Any, Any, T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): +
sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + try: + return getcoroutinestate(task.get_coro()) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_generator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + self._cancel_calls: int = 0 + self._cancelling: int | None = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.remove(host_task) + + self._timeout() + self._active = True + if sys.version_info >= (3, 11): + self._cancelling = self._host_task.cancelling() + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current task's " + "current cancel scope" + ) + + self._active = False + if
self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest directly cancelled parent + # scope if this one was shielded + self._restart_cancellation_in_parent() + + if self._cancel_called and exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError): + self._cancelled_caught = self._uncancel(exc) + if self._cancelled_caught: + break + + return self._cancelled_caught + + return None + + def _uncancel(self, cancelled_exc: CancelledError) -> bool: + if sys.version_info < (3, 9) or self._host_task is None: + self._cancel_calls = 0 + return True + + # Undo all cancellations done by this scope + if self._cancelling is not None: + while self._cancel_calls: + self._cancel_calls -= 1 + if self._host_task.uncancel() <= self._cancelling: + return True + + self._cancel_calls = 0 + return f"Cancelled by cancel scope {id(self):x}" in cancelled_exc.args + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. + + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + origin._cancel_calls += 1 + if sys.version_info >= (3, 9): + task.cancel(f"Cancelled by cancel scope {id(origin):x}") + else: + task.cancel() + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. 
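+
+        A minimal sketch of the behaviour this enables (an illustrative
+        example, assuming only anyio's public API as re-exported above)::
+
+            import anyio
+
+            async def main() -> None:
+                with anyio.move_on_after(0.1):            # outer scope gets cancelled
+                    with anyio.CancelScope(shield=True):  # shield blocks delivery
+                        await anyio.sleep(0.5)            # allowed to finish
+                    await anyio.sleep_forever()           # cancellation resumes here
+
+            anyio.run(main)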
+ + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. + """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + 
self._exceptions.append(exc_val) + + cancelled_exc_while_waiting_tasks: CancelledError | None = None + while self._tasks: + try: + await asyncio.wait(self._tasks) + except CancelledError as exc: + # This task was cancelled natively; reraise the CancelledError later + # unless this task was already interrupted by another exception + self.cancel_scope.cancel() + if cancelled_exc_while_waiting_tasks is None: + cancelled_exc_while_waiting_tasks = exc + + self._active = False + if self._exceptions: + raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) + + # Raise the CancelledError received while waiting for child tasks to exit, + # unless the context manager itself was previously exited with another + # exception, or if any of the child tasks raised an exception other than + # CancelledError + if cancelled_exc_while_waiting_tasks: + if exc_val is None or ignore_exception: + raise cancelled_exc_while_waiting_tasks + + return ignore_exception + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._parent_cancelled(): + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + task = create_task(coro, name=name) + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + self.queue.task_done() + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = 
RunVar("_threadpool_workers") + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + AsyncIOBackend.run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + (self._call_func, func, args, kwargs, future), + self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. 
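+
+    A minimal sketch of the worker pool being shut down here (an illustrative
+    example, assuming only the public ``anyio.to_process`` API)::
+
+        import anyio
+        import anyio.to_process
+
+        async def main() -> None:
+            # each call is dispatched to an idle worker process in the pool
+            print(await anyio.to_process.run_sync(sum, [1, 2, 3]))  # prints 6
+
+        anyio.run(main)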
+ + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block 
until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not 
isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + 
self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif 
self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + waiters_to_notify = max(value - self._total_tokens, 0) + 
self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's " + "tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + 
parent_id = None + else: + parent_id = task_state.parent_id + + super().__init__(id(task), parent_id, task.get_name(), task.get_coro()) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if sys.version_info >= (3, 11): + if task.cancelling(): + return True + elif ( + isinstance(task._fut_waiter, asyncio.Future) + and task._fut_waiter.cancelled() + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope.cancel_called or cancel_scope._parent_cancelled() + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + import uvloop + + loop_factory = uvloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + Tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + 
self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + import uvloop + + loop_factory = uvloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + try: + cancel_scope = _task_states[ + current_task() # type: ignore[index] + ].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, 
total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper(scope: CancelScope) -> T_Retval: + __tracebackhide__ = True + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + scope._tasks.discard(task) + + loop = cast(AbstractEventLoop, token) + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "asyncio") + wrapper = task_wrapper(threadlocals.current_cancel_scope) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, wrapper, loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + sniffio.current_async_library_cvar.set("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = cast(AbstractEventLoop, token) + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return 
BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + await cls.checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + cast("str | bytes", command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + 
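+                    # connect() on a non-blocking socket raises BlockingIOError
+                    # when it cannot complete immediately; the handler below
+                    # then waits for writability and the loop retries.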
raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError("reading from") from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + await cls.checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError("writing to") from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/myenv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/myenv/lib/python3.12/site-packages/anyio/_backends/_trio.py new 
file mode 100644 index 0000000..cf6f3db --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1177 @@ +from __future__ import annotations + +import array +import math +import socket +import sys +import types +import weakref +from collections.abc import AsyncIterator, Iterable +from concurrent.futures import Future +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Generic, + Mapping, + NoReturn, + Sequence, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend +from ..streams.memory import MemoryObjectSendStream + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + # https://github.com/python-trio/trio-typing/pull/79 + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> None: + self.__original.cancel() + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return 
self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except BaseExceptionGroup as exc: + _, rest = exc.split(trio.Cancelled) + if not rest: + cancelled_exc = trio.Cancelled._create() + raise cancelled_exc from exc + + raise + finally: + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if 
self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise 
ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> 
bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + 
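+        # Delegates straight to the wrapped trio.CapacityLimiter, which does
+        # the actual token bookkeeping.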
self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise 
RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run(func, *args) + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run_sync(func, *args) + + @classmethod + def 
create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + process = await trio.lowlevel.open_process( # type: ignore[misc] + command, # type: ignore[arg-type] + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=shell, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... 
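+    # The two overloads above only narrow the static return type; the runtime
+    # branch on remote_path lives in the implementation below. Passing
+    # remote_path=None yields an unconnected UNIXDatagramSocket, while passing
+    # a filesystem path yields a ConnectedUNIXDatagramSocket.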
+ + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_socket_readable(cls, sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_socket_writable(cls, sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/__init__.py b/myenv/lib/python3.12/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9701101 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/myenv/lib/python3.12/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..6dcb458 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +import sniffio + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here <backend options>`) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration.
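+
+    A minimal usage sketch (illustrative)::
+
+        async def pause_briefly() -> None:
+            await sleep(0.5)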
+ + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + threadlocals.current_async_backend = backend_class + threadlocals.current_token = token + try: + yield + finally: + del threadlocals.current_async_backend + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = module.backend_class + return module.backend_class diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/myenv/lib/python3.12/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..571c3b8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,73 @@ +from __future__ import annotations + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. 
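+
+    For example (values illustrative), ``await stream.receive_until(b"\n",
+    max_bytes=1024)`` raises this exception if no newline arrives within the
+    first 1024 bytes.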
+ """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/myenv/lib/python3.12/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..df2057f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,637 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import Callable, Iterable, Iterator, Sequence +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + AsyncIterator, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... + + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
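+
+    A minimal sketch (the path is a placeholder; an AnyIO event loop is assumed
+    to be running)::
+
+        async with await open_file("/tmp/notes.txt", "w") as f:
+            await f.write("hello")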
+
+    :return: an asynchronous file object
+
+    """
+    fp = await to_thread.run_sync(
+        open, file, mode, buffering, encoding, errors, newline, closefd, opener
+    )
+    return AsyncFile(fp)
+
+
+def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
+    """
+    Wrap an existing file as an asynchronous file.
+
+    :param file: an existing file-like object
+    :return: an asynchronous file object
+
+    """
+    return AsyncFile(file)
+
+
+@dataclass(eq=False)
+class _PathIterator(AsyncIterator["Path"]):
+    iterator: Iterator[PathLike[str]]
+
+    async def __anext__(self) -> Path:
+        nextval = await to_thread.run_sync(
+            next, self.iterator, None, abandon_on_cancel=True
+        )
+        if nextval is None:
+            raise StopAsyncIteration from None
+
+        return Path(nextval)
+
+
+class Path:
+    """
+    An asynchronous version of :class:`pathlib.Path`.
+
+    This class cannot be substituted for :class:`pathlib.Path` or
+    :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
+    interface.
+
+    It implements the Python 3.10 version of the :class:`pathlib.Path` interface,
+    except for the deprecated :meth:`~pathlib.Path.link_to` method.
+
+    Any methods that do disk I/O need to be awaited on. These methods are:
+
+    * :meth:`~pathlib.Path.absolute`
+    * :meth:`~pathlib.Path.chmod`
+    * :meth:`~pathlib.Path.cwd`
+    * :meth:`~pathlib.Path.exists`
+    * :meth:`~pathlib.Path.expanduser`
+    * :meth:`~pathlib.Path.group`
+    * :meth:`~pathlib.Path.hardlink_to`
+    * :meth:`~pathlib.Path.home`
+    * :meth:`~pathlib.Path.is_block_device`
+    * :meth:`~pathlib.Path.is_char_device`
+    * :meth:`~pathlib.Path.is_dir`
+    * :meth:`~pathlib.Path.is_fifo`
+    * :meth:`~pathlib.Path.is_file`
+    * :meth:`~pathlib.Path.is_mount`
+    * :meth:`~pathlib.Path.lchmod`
+    * :meth:`~pathlib.Path.lstat`
+    * :meth:`~pathlib.Path.mkdir`
+    * :meth:`~pathlib.Path.open`
+    * :meth:`~pathlib.Path.owner`
+    * :meth:`~pathlib.Path.read_bytes`
+    * :meth:`~pathlib.Path.read_text`
+    * :meth:`~pathlib.Path.readlink`
+    * :meth:`~pathlib.Path.rename`
+    * :meth:`~pathlib.Path.replace`
+    * :meth:`~pathlib.Path.rmdir`
+    * :meth:`~pathlib.Path.samefile`
+    * :meth:`~pathlib.Path.stat`
+    * :meth:`~pathlib.Path.touch`
+    * :meth:`~pathlib.Path.unlink`
+    * :meth:`~pathlib.Path.write_bytes`
+    * :meth:`~pathlib.Path.write_text`
+
+    Additionally, the following methods return an async iterator yielding
+    :class:`~.Path` objects:
+
+    * :meth:`~pathlib.Path.glob`
+    * :meth:`~pathlib.Path.iterdir`
+    * :meth:`~pathlib.Path.rglob`
+    """
+
+    __slots__ = "_path", "__weakref__"
+
+    __weakref__: Any
+
+    def __init__(self, *args: str | PathLike[str]) -> None:
+        self._path: Final[pathlib.Path] = pathlib.Path(*args)
+
+    def __fspath__(self) -> str:
+        return self._path.__fspath__()
+
+    def __str__(self) -> str:
+        return self._path.__str__()
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.as_posix()!r})"
+
+    def __bytes__(self) -> bytes:
+        return self._path.__bytes__()
+
+    def __hash__(self) -> int:
+        return self._path.__hash__()
+
+    def __eq__(self, other: object) -> bool:
+        target = other._path if isinstance(other, Path) else other
+        return self._path.__eq__(target)
+
+    def __lt__(self, other: pathlib.PurePath | Path) -> bool:
+        target = other._path if isinstance(other, Path) else other
+        return self._path.__lt__(target)
+
+    def __le__(self, other: pathlib.PurePath | Path) -> bool:
+        target = other._path if isinstance(other, Path) else other
+        return self._path.__le__(target)
+
+    def __gt__(self, other: pathlib.PurePath | Path) -> bool:
+        target = other._path if isinstance(other, Path) else
other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + async def is_mount(self) -> bool: + return await 
to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + def iterdir(self) -> AsyncIterator[Path]: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + return Path(self._path.relative_to(*other, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) 
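+
+    # A usage sketch for the async methods of this class (the path is a
+    # placeholder; requires a running AnyIO event loop):
+    #
+    #     path = Path("/tmp/data.txt")
+    #     if await path.exists():
+    #         text = await path.read_text()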
+ + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_resources.py b/myenv/lib/python3.12/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9a5344 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. 
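+
+    A minimal sketch, mirroring how this helper is used elsewhere in this
+    package (``stream`` stands for any ``AsyncResource``)::
+
+        try:
+            await use_stream(stream)  # hypothetical operation
+        except BaseException:
+            await aclose_forcefully(stream)
+            raise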
+ + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_signals.py b/myenv/lib/python3.12/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..115c749 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_signals.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from signal import Signals +from typing import ContextManager + +from ._eventloop import get_async_backend + + +def open_signal_receiver(*signals: Signals) -> ContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/myenv/lib/python3.12/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..5e09cdb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,711 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import Any, Literal, cast, overload + +from .. import to_thread +from ..abc import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... 
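+
+# A usage sketch for the concrete connect_tcp() defined below (illustrative;
+# the host and port are placeholders):
+#
+#     async with await connect_tcp("example.com", 443, tls=True) as stream:
+#         await stream.send(b"ping")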
+
+
+# tls=True
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    tls: Literal[True],
+    ssl_context: ssl.SSLContext | None = ...,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> TLSStream: ...
+
+
+# tls=False
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    tls: Literal[False],
+    ssl_context: ssl.SSLContext | None = ...,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> SocketStream: ...
+
+
+# No TLS arguments
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> SocketStream: ...
+
+
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = None,
+    tls: bool = False,
+    ssl_context: ssl.SSLContext | None = None,
+    tls_standard_compatible: bool = True,
+    tls_hostname: str | None = None,
+    happy_eyeballs_delay: float = 0.25,
+) -> SocketStream | TLSStream:
+    """
+    Connect to a host using the TCP protocol.
+
+    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
+    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
+    each one is tried until one connection attempt succeeds. If the first attempt does
+    not connect within 250 milliseconds, a second attempt is started using the next
+    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
+    available) is tried first.
+
+    When the connection has been established, a TLS handshake will be done if either
+    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
+
+    :param remote_host: the IP address or host name to connect to
+    :param remote_port: port on the target host to connect to
+    :param local_host: the interface address or name to bind the socket to before
+        connecting
+    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
+        :class:`~anyio.streams.tls.TLSStream` instead
+    :param ssl_context: the SSL context object to use (if omitted, a default context is
+        created)
+    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
+        before closing the stream and requires that the server does this as well.
+        Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
+        Some protocols, such as HTTP, require this option to be ``False``.
+        See :meth:`~ssl.SSLContext.wrap_socket` for details.
+    :param tls_hostname: host name to check the server certificate against (defaults to
+        the value of ``remote_host``)
+    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection
+        attempt
+    :return: a socket stream object if no TLS handshake was done, otherwise a TLS
+        stream
+    :raises OSError: if the connection attempt fails
+
+    """
+    # Placed here due to https://github.com/python/mypy/issues/7057
+    connected_stream: SocketStream | None = None
+
+    async def try_connect(remote_host: str, event: Event) -> None:
+        nonlocal connected_stream
+        try:
+            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
+        except OSError as exc:
+            oserrors.append(exc)
+            return
+        else:
+            if connected_stream is None:
+                connected_stream = stream
+                tg.cancel_scope.cancel()
+            else:
+                await stream.aclose()
+        finally:
+            event.set()
+
+    asynclib = get_async_backend()
+    local_address: IPSockAddrType | None = None
+    family = socket.AF_UNSPEC
+    if local_host:
+        gai_res = await getaddrinfo(str(local_host), None)
+        family, *_, local_address = gai_res[0]
+
+    target_host = str(remote_host)
+    try:
+        addr_obj = ip_address(remote_host)
+    except ValueError:
+        # getaddrinfo() will raise an exception if name resolution fails
+        gai_res = await getaddrinfo(
+            target_host, remote_port, family=family, type=socket.SOCK_STREAM
+        )
+
+        # Organize the list so that the first address is an IPv6 address (if available)
+        # and the second one is an IPv4 address. The rest can be in whatever order.
+        v6_found = v4_found = False
+        target_addrs: list[tuple[socket.AddressFamily, str]] = []
+        for af, *rest, sa in gai_res:
+            if af == socket.AF_INET6 and not v6_found:
+                v6_found = True
+                target_addrs.insert(0, (af, sa[0]))
+            elif af == socket.AF_INET and not v4_found and v6_found:
+                v4_found = True
+                target_addrs.insert(1, (af, sa[0]))
+            else:
+                target_addrs.append((af, sa[0]))
+    else:
+        if isinstance(addr_obj, IPv6Address):
+            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
+        else:
+            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
+
+    oserrors: list[OSError] = []
+    async with create_task_group() as tg:
+        for i, (af, addr) in enumerate(target_addrs):
+            event = Event()
+            tg.start_soon(try_connect, addr, event)
+            with move_on_after(happy_eyeballs_delay):
+                await event.wait()
+
+    if connected_stream is None:
+        cause = (
+            oserrors[0]
+            if len(oserrors) == 1
+            else ExceptionGroup("multiple connection attempts failed", oserrors)
+        )
+        raise OSError("All connection attempts failed") from cause
+
+    if tls or tls_hostname or ssl_context:
+        try:
+            return await TLSStream.wrap(
+                connected_stream,
+                server_side=False,
+                hostname=tls_hostname or str(remote_host),
+                ssl_context=ssl_context,
+                standard_compatible=tls_standard_compatible,
+            )
+        except BaseException:
+            await aclose_forcefully(connected_stream)
+            raise
+
+    return connected_stream
+
+
+async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream:
+    """
+    Connect to the given UNIX socket.
+
+    Not available on Windows.
+
+    :param path: path to the socket
+    :return: a socket stream object
+
+    """
+    path = os.fspath(path)
+    return await get_async_backend().connect_unix(path)
+
+
+async def create_tcp_listener(
+    *,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
+    backlog: int = 65536,
+    reuse_port: bool = False,
+) -> MultiListener[SocketStream]:
+    """
+    Create a TCP socket listener.
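+
+    A minimal serving sketch (``handle`` is a hypothetical connection handler
+    coroutine; the port is a placeholder)::
+
+        listener = await create_tcp_listener(local_port=8000)
+        await listener.serve(handle)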
+ + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + listeners: list[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddr: tuple[str, int] | tuple[str, int, int, int] + for fam, kind, *_, sockaddr in sorted(set(gai_res)): + # Workaround for an uvloop bug where we don't get the correct scope ID for + # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to + # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 + if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: + continue + + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. + if sys.platform == "win32": + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # Workaround for #554 + if "%" in sockaddr[0]: + addr, scope_id = sockaddr[0].split("%", 1) + sockaddr = (addr, sockaddr[1], 0, int(scope_id)) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.create_tcp_listener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. + + """ + backlog = min(backlog, 65536) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) + try: + raw_socket.listen(backlog) + return get_async_backend().create_unix_listener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. 
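+
+    A minimal echo sketch (the local address and port are placeholders)::
+
+        async with await create_udp_socket(
+            local_host="127.0.0.1", local_port=9000
+        ) as udp:
+            data, (host, port) = await udp.receive()
+            await udp.sendto(data, host, port)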
+
+    If ``port`` has been given, the socket will be bound to this port on the local
+    machine, making this socket suitable for providing UDP based services.
+
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a UDP socket
+
+    """
+    if family is AddressFamily.AF_UNSPEC and not local_host:
+        raise ValueError('Either "family" or "local_host" must be given')
+
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+    elif family is AddressFamily.AF_INET6:
+        local_address = ("::", 0)
+    else:
+        local_address = ("0.0.0.0", 0)
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, None, reuse_port
+    )
+    return cast(UDPSocket, sock)
+
+
+async def create_connected_udp_socket(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> ConnectedUDPSocket:
+    """
+    Create a connected UDP socket.
+
+    Connected UDP sockets can only communicate with the specified remote host/port,
+    and any packets sent from other sources are dropped.
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a connected UDP socket
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+    return cast(ConnectedUDPSocket, sock)
+
+
+async def create_unix_datagram_socket(
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> UNIXDatagramSocket:
+    """
+    Create a UNIX datagram socket.
+
+    Not available on Windows.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other processes
+    can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
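+
+    A minimal receiving sketch (the socket path is a placeholder)::
+
+        async with await create_unix_datagram_socket(
+            local_path="/tmp/app.sock"
+        ) as sock:
+            data, sender = await sock.receive()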
+
+    :param local_path: the path to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a UNIX datagram socket
+
+    """
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(raw_socket, None)
+
+
+async def create_connected_unix_datagram_socket(
+    remote_path: str | bytes | PathLike[Any],
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> ConnectedUNIXDatagramSocket:
+    """
+    Create a connected UNIX datagram socket.
+
+    Connected datagram sockets can only communicate with the specified remote path.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other processes
+    can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param remote_path: the path to set as the default target
+    :param local_path: the path to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a connected UNIX datagram socket
+
+    """
+    remote_path = os.fspath(remote_path)
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(
+        raw_socket, remote_path
+    )
+
+
+async def getaddrinfo(
+    host: bytes | str | None,
+    port: str | int | None,
+    *,
+    family: int | AddressFamily = 0,
+    type: int | SocketKind = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional)
+    IDNA 2008 standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host: bytes | None = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_async_backend().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
+
+    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
+    :param flags: flags to pass to upstream ``getnameinfo()``
+    :return: a tuple of (host name, service name)
+
+    .. seealso:: :func:`socket.getnameinfo`
+
+    """
+    return get_async_backend().getnameinfo(sockaddr, flags)
+
+
+def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
+    """
+    Wait until the given socket has data to be read.
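+
+    A minimal sketch (``sock`` is assumed to be an unwrapped, non-blocking
+    socket object)::
+
+        await wait_socket_readable(sock)
+        data = sock.recv(4096)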
+ + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_async_backend().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_async_backend().wait_socket_writable(sock) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | bytes | None + if path is not None: + path_str = os.fspath(path) + + # Copied from pathlib... 
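+        # The stat()/unlink() sequence below removes a stale socket file left
+        # over from a previous run; non-socket files are left in place and
+        # unexpected errors propagate.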
+ try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EBADF, errno.ELOOP): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_streams.py b/myenv/lib/python3.12/site-packages/anyio/_core/_streams.py new file mode 100644 index 0000000..aa6b0c2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_streams.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import math +from typing import Tuple, TypeVar +from warnings import warn + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +class create_memory_object_stream( + Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): + """ + Create a memory object stream. + + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. + :return: a tuple of (send stream, receive stream) + + """ + + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) 
instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/myenv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 0000000..5d5d7b7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +from collections.abc import AsyncIterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + + +async def run_process( + command: str | bytes | Sequence[str | bytes], + *, + input: bytes | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. 
+        (POSIX only)
+    :return: an object representing the completed process
+    :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process
+        exits with a nonzero return code
+
+    """
+
+    async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
+        buffer = BytesIO()
+        async for chunk in stream:
+            buffer.write(chunk)
+
+        stream_contents[index] = buffer.getvalue()
+
+    async with await open_process(
+        command,
+        stdin=PIPE if input else DEVNULL,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        start_new_session=start_new_session,
+    ) as process:
+        stream_contents: list[bytes | None] = [None, None]
+        async with create_task_group() as tg:
+            if process.stdout:
+                tg.start_soon(drain_stream, process.stdout, 0)
+
+            if process.stderr:
+                tg.start_soon(drain_stream, process.stderr, 1)
+
+            if process.stdin and input:
+                await process.stdin.send(input)
+                await process.stdin.aclose()
+
+            await process.wait()
+
+    output, errors = stream_contents
+    if check and process.returncode != 0:
+        raise CalledProcessError(cast(int, process.returncode), command, output, errors)
+
+    return CompletedProcess(command, cast(int, process.returncode), output, errors)
+
+
+async def open_process(
+    command: str | bytes | Sequence[str | bytes],
+    *,
+    stdin: int | IO[Any] | None = PIPE,
+    stdout: int | IO[Any] | None = PIPE,
+    stderr: int | IO[Any] | None = PIPE,
+    cwd: str | bytes | PathLike[str] | None = None,
+    env: Mapping[str, str] | None = None,
+    start_new_session: bool = False,
+) -> Process:
+    """
+    Start an external command in a subprocess.
+
+    .. seealso:: :class:`subprocess.Popen`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+        file-like object, or ``None``
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or ``None``
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or ``None``
+    :param cwd: If not ``None``, the working directory is changed before executing
+    :param env: If env is not ``None``, it must be a mapping that defines the
+        environment variables for the new process
+    :param start_new_session: if ``True`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess.
(POSIX only) + :return: an asynchronous process object + + """ + if isinstance(command, (str, bytes)): + return await get_async_backend().open_process( + command, + shell=True, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + return await get_async_backend().open_process( + command, + shell=False, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/myenv/lib/python3.12/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 0000000..b274a31 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,649 @@ +from __future__ import annotations + +import math +from collections import deque +from dataclasses import dataclass +from types import TracebackType + +from sniffio import AsyncLibraryNotFoundError + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._eventloop import get_async_backend +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from + this limiter + :ivar int tasks_waiting: number of tasks waiting on + :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except AsyncLibraryNotFoundError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. 
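+
+        A minimal coordination sketch (``create_task_group`` is assumed to be
+        imported from ``anyio``)::
+
+            async def waiter(event: Event) -> None:
+                await event.wait()
+
+            event = Event()
+            async with create_task_group() as tg:
+                tg.start_soon(waiter, event)
+                event.set()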
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + + return self._internal_event + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._internal_event is not None and self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + _owner_task: TaskInfo | None = None + + def __init__(self) -> None: + self._waiters: deque[tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + elif self._owner_task == task: + self.release() + + raise + + assert self._owner_task == task + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> None: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding this lock") + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. 
versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> None: + """Release the underlying lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. 
versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: int | None = None): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._value = initial_value + self._max_value = max_value + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> Semaphore: + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + else: + self.release() + + raise + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except AsyncLibraryNotFoundError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. 
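+
+        A minimal resizing sketch::
+
+            limiter = CapacityLimiter(4)
+            limiter.total_tokens = 8  # grants tokens to up to 4 waiting tasks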
+ + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. 
versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. 
versionadded:: 4.1
+ """
+
+ __slots__ = "action", "_guarded"
+
+ def __init__(self, action: str = "using"):
+ self.action: str = action
+ self._guarded = False
+
+ def __enter__(self) -> None:
+ if self._guarded:
+ raise BusyResourceError(self.action)
+
+ self._guarded = True
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ self._guarded = False
+ return None
diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/myenv/lib/python3.12/site-packages/anyio/_core/_tasks.py
new file mode 100644
index 0000000..2f21ea2
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/anyio/_core/_tasks.py
@@ -0,0 +1,158 @@
+from __future__ import annotations
+
+import math
+from collections.abc import Generator
+from contextlib import contextmanager
+from types import TracebackType
+
+from ..abc._tasks import TaskGroup, TaskStatus
+from ._eventloop import get_async_backend
+
+
+class _IgnoredTaskStatus(TaskStatus[object]):
+ def started(self, value: object = None) -> None:
+ pass
+
+
+TASK_STATUS_IGNORED = _IgnoredTaskStatus()
+
+
+class CancelScope:
+ """
+ Wraps a unit of work that can be made separately cancellable.
+
+ :param deadline: The time (clock value) when this scope is cancelled automatically
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ """
+
+ def __new__(
+ cls, *, deadline: float = math.inf, shield: bool = False
+ ) -> CancelScope:
+ return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline)
+
+ def cancel(self) -> None:
+ """Cancel this scope immediately."""
+ raise NotImplementedError
+
+ @property
+ def deadline(self) -> float:
+ """
+ The time (clock value) when this scope is cancelled automatically.
+
+ Will be ``float('inf')`` if no timeout has been set.
+
+ """
+ raise NotImplementedError
+
+ @deadline.setter
+ def deadline(self, value: float) -> None:
+ raise NotImplementedError
+
+ @property
+ def cancel_called(self) -> bool:
+ """``True`` if :meth:`cancel` has been called."""
+ raise NotImplementedError
+
+ @property
+ def cancelled_caught(self) -> bool:
+ """
+ ``True`` if this scope suppressed a cancellation exception it itself raised.
+
+ This is typically used to check if any work was interrupted, or to see if the
+ scope was cancelled due to its deadline being reached. The value will, however,
+ only be ``True`` if the cancellation was triggered by the scope itself (and not
+ an outer scope).
+
+ """
+ raise NotImplementedError
+
+ @property
+ def shield(self) -> bool:
+ """
+ ``True`` if this scope is shielded from external cancellation.
+
+ While a scope is shielded, it will not receive cancellations from outside.
+
+ """
+ raise NotImplementedError
+
+ @shield.setter
+ def shield(self, value: bool) -> None:
+ raise NotImplementedError
+
+ def __enter__(self) -> CancelScope:
+ raise NotImplementedError
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ raise NotImplementedError
+
+
+@contextmanager
+def fail_after(
+ delay: float | None, shield: bool = False
+) -> Generator[CancelScope, None, None]:
+ """
+ Create a context manager which raises a :class:`TimeoutError` if it does not finish
+ in time.
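A short usage sketch of the timeout machinery defined here (the one-second delay is arbitrary; only public ``anyio`` names are assumed):

import anyio

async def main() -> None:
    try:
        with anyio.fail_after(1):  # a CancelScope with deadline = now + 1s
            await anyio.sleep(5)
    except TimeoutError:
        print("the scope hit its deadline and was cancelled")

anyio.run(main)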
+ + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. + + :return: a task group + + """ + return get_async_backend().create_task_group() diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_testing.py b/myenv/lib/python3.12/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..9e28b22 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_testing.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. 
+ + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/myenv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/myenv/lib/python3.12/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..f358a44 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. + + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. 
+ + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/__init__.py b/myenv/lib/python3.12/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..1ca0fcf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/__init__.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from typing import Any + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio.abc."): + value.__module__ = __name__ diff --git 
a/myenv/lib/python3.12/site-packages/anyio/abc/_eventloop.py b/myenv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
new file mode 100644
index 0000000..a50afef
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
@@ -0,0 +1,390 @@
+from __future__ import annotations
+
+import math
+import sys
+from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncIterator, Awaitable, Mapping
+from os import PathLike
+from signal import Signals
+from socket import AddressFamily, SocketKind, socket
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ ContextManager,
+ Sequence,
+ TypeVar,
+ overload,
+)
+
+if sys.version_info >= (3, 11):
+ from typing import TypeVarTuple, Unpack
+else:
+ from typing_extensions import TypeVarTuple, Unpack
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from .._core._synchronization import CapacityLimiter, Event
+ from .._core._tasks import CancelScope
+ from .._core._testing import TaskInfo
+ from ..from_thread import BlockingPortal
+ from ._sockets import (
+ ConnectedUDPSocket,
+ ConnectedUNIXDatagramSocket,
+ IPSockAddrType,
+ SocketListener,
+ SocketStream,
+ UDPSocket,
+ UNIXDatagramSocket,
+ UNIXSocketStream,
+ )
+ from ._subprocesses import Process
+ from ._tasks import TaskGroup
+ from ._testing import TestRunner
+
+T_Retval = TypeVar("T_Retval")
+PosArgsT = TypeVarTuple("PosArgsT")
+
+
+class AsyncBackend(metaclass=ABCMeta):
+ @classmethod
+ @abstractmethod
+ def run(
+ cls,
+ func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
+ args: tuple[Unpack[PosArgsT]],
+ kwargs: dict[str, Any],
+ options: dict[str, Any],
+ ) -> T_Retval:
+ """
+ Run the given coroutine function in an asynchronous event loop.
+
+ The current thread must not already be running an event loop.
+
+ :param func: a coroutine function
+ :param args: positional arguments to ``func``
+ :param kwargs: keyword arguments to ``func``
+ :param options: keyword arguments to call the backend ``run()`` implementation
+ with
+ :return: the return value of the coroutine function
+ """
+
+ @classmethod
+ @abstractmethod
+ def current_token(cls) -> object:
+ """
+ Return a token identifying the running event loop, for use with
+ :meth:`run_sync_from_thread` and :meth:`run_async_from_thread`.
+
+ :return: an opaque, backend-specific token object
+ """
+
+ @classmethod
+ @abstractmethod
+ def current_time(cls) -> float:
+ """
+ Return the current value of the event loop's internal clock.
+
+ :return: the clock value (seconds)
+ """
+
+ @classmethod
+ @abstractmethod
+ def cancelled_exception_class(cls) -> type[BaseException]:
+ """Return the exception class that is raised in a task if it's cancelled."""
+
+ @classmethod
+ @abstractmethod
+ async def checkpoint(cls) -> None:
+ """
+ Check if the task has been cancelled, and allow rescheduling of other tasks.
+
+ This is effectively the same as running :meth:`checkpoint_if_cancelled` and then
+ :meth:`cancel_shielded_checkpoint`.
+ """
+
+ @classmethod
+ async def checkpoint_if_cancelled(cls) -> None:
+ """
+ Check if the current task has been cancelled.
+
+ This will check if the task has been cancelled, but will not allow other tasks
+ to be scheduled if not.
+
+ """
+ if cls.current_effective_deadline() == -math.inf:
+ await cls.checkpoint()
+
+ @classmethod
+ async def cancel_shielded_checkpoint(cls) -> None:
+ """
+ Allow the rescheduling of other tasks.
+
+ This will give other tasks the opportunity to run, but without checking if the
+ current task has been cancelled, unlike with :meth:`checkpoint`.
+
+ """
+ with cls.create_cancel_scope(shield=True):
+ await cls.sleep(0)
+
+ @classmethod
+ @abstractmethod
+ async def sleep(cls, delay: float) -> None:
+ """
+ Pause the current task for the specified duration.
+
+ :param delay: the duration, in seconds
+ """
+
+ @classmethod
+ @abstractmethod
+ def create_cancel_scope(
+ cls, *, deadline: float = math.inf, shield: bool = False
+ ) -> CancelScope:
+ pass
+
+ @classmethod
+ @abstractmethod
+ def current_effective_deadline(cls) -> float:
+ """
+ Return the nearest deadline among all the cancel scopes effective for the
+ current task.
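The deadline semantics are observable through the public wrapper; a sketch (the timeout value is arbitrary):

import math
import anyio

async def main() -> None:
    assert anyio.current_effective_deadline() == math.inf  # no deadline set
    with anyio.move_on_after(5):
        # inside the scope, the nearest enclosing deadline applies
        assert anyio.current_effective_deadline() < math.inf

anyio.run(main)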
+ + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @overload + async def open_process( + cls, + command: str | bytes, + *, + shell: Literal[True], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @overload + async def open_process( + cls, + command: Sequence[str | bytes], + *, + shell: Literal[False], + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: str | bytes | Sequence[str | bytes], + *, + shell: bool, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + cwd: str | bytes | PathLike[str] | None = None, + env: Mapping[str, str] | None = None, + start_new_session: bool = False, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... 
+ + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_socket_readable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + async def wait_socket_writable(cls, sock: socket) -> None: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> ContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_resources.py b/myenv/lib/python3.12/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..10df115 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. 
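A sketch of the context-manager behaviour described above, using a hypothetical subclass:

import anyio
from anyio.abc import AsyncResource

class DemoResource(AsyncResource):  # hypothetical resource
    async def aclose(self) -> None:
        print("closed")

async def main() -> None:
    async with DemoResource():  # __aenter__ returns the instance
        pass                    # __aexit__ awaits aclose()

anyio.run(main)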
+ """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/myenv/lib/python3.12/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..b321225 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import socket +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import Any, Tuple, TypeVar, Union + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType = Tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class 
SocketStream(ByteStream, _SocketProvider):
+ """
+ Transports bytes over a socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+
+class UNIXSocketStream(SocketStream):
+ @abstractmethod
+ async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+ """
+ Send file descriptors along with a message to the peer.
+
+ :param message: a non-empty bytestring
+ :param fds: a collection of files (either numeric file descriptors or open file
+ or socket objects)
+ """
+
+ @abstractmethod
+ async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+ """
+ Receive file descriptors along with a message from the peer.
+
+ :param msglen: length of the message to expect from the peer
+ :param maxfds: maximum number of file descriptors to expect from the peer
+ :return: a tuple of (message, file descriptors)
+ """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+ """
+ Listens to incoming socket connections.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @abstractmethod
+ async def accept(self) -> SocketStream:
+ """Accept an incoming connection."""
+
+ async def serve(
+ self,
+ handler: Callable[[SocketStream], Any],
+ task_group: TaskGroup | None = None,
+ ) -> None:
+ from .. import create_task_group
+
+ async with AsyncExitStack() as stack:
+ if task_group is None:
+ task_group = await stack.enter_async_context(create_task_group())
+
+ while True:
+ stream = await self.accept()
+ task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+ """
+ Represents an unconnected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ async def sendto(self, data: bytes, host: str, port: int) -> None:
+ """
+ Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).
+
+ """
+ return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+
+class UNIXDatagramSocket(
+ UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider
+):
+ """
+ Represents an unconnected Unix datagram socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ async def sendto(self, data: bytes, path: str) -> None:
+ """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path))."""
+ return await self.send((data, path))
+
+
+class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected Unix datagram socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
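A sketch of the datagram API defined above (the loopback address and port number are arbitrary):

import anyio

async def main() -> None:
    async with await anyio.create_udp_socket(local_host="127.0.0.1") as udp:
        # sendto() is the documented alias for send((data, (host, port)))
        await udp.sendto(b"ping", "127.0.0.1", 5000)

anyio.run(main)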
+ """ diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_streams.py b/myenv/lib/python3.12/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..8c63868 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_streams.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. + """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). 
+ """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/myenv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..ce0564c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/myenv/lib/python3.12/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..88aecf3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. 
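The started() handshake pairs with TaskGroup.start(); a sketch:

import anyio
from anyio import TASK_STATUS_IGNORED
from anyio.abc import TaskStatus

async def service(*, task_status: TaskStatus = TASK_STATUS_IGNORED) -> None:
    # ...perform setup here, then signal readiness with an optional value
    task_status.started("ready")
    await anyio.sleep_forever()

async def main() -> None:
    async with anyio.create_task_group() as tg:
        value = await tg.start(service)  # blocks until started() is called
        assert value == "ready"
        tg.cancel_scope.cancel()

anyio.run(main)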
+ + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/myenv/lib/python3.12/site-packages/anyio/abc/_testing.py b/myenv/lib/python3.12/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..7c50ed7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. 
+ + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/myenv/lib/python3.12/site-packages/anyio/from_thread.py b/myenv/lib/python3.12/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..88a854b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/from_thread.py @@ -0,0 +1,530 @@ +from __future__ import annotations + +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from dataclasses import dataclass, field +from inspect import isawaitable +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + ContextManager, + Generic, + Iterable, + TypeVar, + cast, + overload, +) + +from ._core import _eventloop +from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc import AsyncBackend +from .abc._tasks import TaskStatus + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a coroutine function from a worker thread. + + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_async_from_thread(func, args, token=token) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_sync_from_thread(func, args, token=token) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. 
+ await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> BlockingPortal: + return get_async_backend().create_blocking_portal() + + def __init__(self) -> None: + self._event_loop_thread_id: int | None = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. 
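For the common case of a worker thread spawned from async code, the module-level helpers defined earlier suffice; a sketch:

import anyio
from anyio import from_thread, to_thread

def sync_job() -> str:
    # runs in a worker thread; hops back into the event loop thread
    return from_thread.run_sync(lambda: "computed in the event loop")

async def main() -> None:
    print(await to_thread.run_sync(sync_job))

anyio.run(main)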
+ + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + threading.get_ident(), + ): + self.call(scope.cancel) + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except self._cancelled_exc_class: + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... + + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. 
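Because the task runs inside its own cancel scope, cancelling the returned future cancels the task; a sketch using a portal from a foreign thread:

import anyio
from anyio.from_thread import start_blocking_portal

with start_blocking_portal() as portal:
    future = portal.start_task_soon(anyio.sleep, 10)
    future.cancel()  # cancels the task's cancel scope in the event loop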
+ + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AsyncContextManager[T_co] + ) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@dataclass +class BlockingPortalProvider: + """ + A manager for a blocking portal. Used as a context manager. The first thread to + enter this context manager causes a blocking portal to be started with the specific + parameters, and the last thread to exit causes the portal to be shut down. Thus, + there will be exactly one blocking portal running in this context as long as at + least one thread has entered this context manager. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + + .. 
versionadded:: 4.4 + """ + + backend: str = "asyncio" + backend_options: dict[str, Any] | None = None + _lock: threading.Lock = field(init=False, default_factory=threading.Lock) + _leases: int = field(init=False, default=0) + _portal: BlockingPortal = field(init=False) + _portal_cm: AbstractContextManager[BlockingPortal] | None = field( + init=False, default=None + ) + + def __enter__(self) -> BlockingPortal: + with self._lock: + if self._portal_cm is None: + self._portal_cm = start_blocking_portal( + self.backend, self.backend_options + ) + self._portal = self._portal_cm.__enter__() + + self._leases += 1 + return self._portal + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + portal_cm: AbstractContextManager[BlockingPortal] | None = None + with self._lock: + assert self._portal_cm + assert self._leases > 0 + self._leases -= 1 + if not self._leases: + portal_cm = self._portal_cm + self._portal_cm = None + del self._portal + + if portal_cm: + portal_cm.__exit__(None, None, None) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: dict[str, Any] | None = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. + + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit( + _eventloop.run, # type: ignore[arg-type] + run_portal, + backend=backend, + backend_options=backend_options, + ) + try: + wait( + cast(Iterable[Future], [run_future, future]), + return_when=FIRST_COMPLETED, + ) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + cancel_remaining_tasks = False + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + + run_future.result() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. 
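# A sketch of BlockingPortalProvider under the same assumptions: worker threads
# share one portal; the first thread to enter starts it, the last one stops it.
from concurrent.futures import ThreadPoolExecutor

import anyio
from anyio.from_thread import BlockingPortalProvider

provider = BlockingPortalProvider(backend="asyncio")

def from_worker_thread(n: int) -> None:
    with provider as portal:            # leases the shared portal
        portal.call(anyio.sleep, 0.01)  # runs on the single event loop thread

with ThreadPoolExecutor(4) as pool:
    list(pool.map(from_worker_thread, range(8)))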
+ + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + async_backend: AsyncBackend = threadlocals.current_async_backend + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + async_backend.check_cancelled() diff --git a/myenv/lib/python3.12/site-packages/anyio/lowlevel.py b/myenv/lib/python3.12/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..14c7668 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/lowlevel.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +import enum +from dataclasses import dataclass +from typing import Any, Generic, Literal, TypeVar, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_async_backend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +def current_token() -> object: + """ + Return a backend specific token object that can be used to get back to the event + loop. + + """ + return get_async_backend().current_token() + + +_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() +_token_wrappers: dict[Any, _TokenWrapper] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = "_token", "__weakref__" + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: set[_TokenWrapper] = set() + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[str, T]: + token = current_token() + try: + return _run_vars[token] + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: ... + + @overload + def get(self) -> T: ... 
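# A usage sketch for RunVar (the variable name is illustrative): values are
# scoped to the running event loop, so each anyio.run() call starts fresh.
import anyio
from anyio.lowlevel import RunVar

_request_count: RunVar[int] = RunVar("_request_count", default=0)

async def handle() -> int:
    count = _request_count.get() + 1
    _request_count.set(count)  # visible to every task in this event loop
    return count

assert anyio.run(handle) == 1
assert anyio.run(handle) == 1  # a new event loop does not see the old value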
+ + def get( + self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ) -> T | D: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"<RunVar name={self._name!r}>" diff --git a/myenv/lib/python3.12/site-packages/anyio/py.typed b/myenv/lib/python3.12/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/anyio/pytest_plugin.py b/myenv/lib/python3.12/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..a8dd6f3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import ExitStack, contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction +from typing import Any, Dict, Tuple, cast + +import pytest +import sniffio + +from ._core._eventloop import get_all_backends, get_async_backend +from .abc import TestRunner + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(Tuple[str, Dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = sniffio.current_async_library_cvar.set(backend_name) + _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def pytest_configure(config: Any) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run " + "asynchronously via anyio.", + ) + + +def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: + def
wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(func): + yield from runner.run_asyncgen_fixture(func, kwargs) + else: + yield runner.run_fixture(func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + has_backend_arg = "anyio_backend" in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ("anyio_backend",) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.run_test(pyfuncitem.obj, testargs) + + return True + + return None + + +@pytest.fixture(scope="module", params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__init__.py b/myenv/lib/python3.12/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..267ca46 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc new file mode 100644 index 
0000000..a4ee6c2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc new file mode 100644 index 0000000..a82ce1d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc new file mode 100644 index 0000000..c84b1f1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc new file mode 100644 index 0000000..816e156 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc new file mode 100644 index 0000000..6a6fc87 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc new file mode 100644 index 0000000..ef29a68 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/buffered.py b/myenv/lib/python3.12/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..f5d5e83 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/buffered.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from dataclasses import dataclass, field +from typing import Any + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. 
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
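# A sketch of the buffered wrapper over a memory object stream (assuming the
# anyio package here is importable): surplus bytes are kept for later reads.
import anyio
from anyio.streams.buffered import BufferedByteReceiveStream

async def main() -> None:
    send, receive = anyio.create_memory_object_stream[bytes](max_buffer_size=1)
    buffered = BufferedByteReceiveStream(receive)
    await send.send(b"HELLO\nworld")
    assert await buffered.receive_exactly(5) == b"HELLO"
    assert buffered.buffer == b"\nworld"  # the leftovers wait in the buffer

anyio.run(main)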
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/file.py b/myenv/lib/python3.12/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..f492464 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/file.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. 
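# A sketch of the file stream pair defined in this module (the path below is
# illustrative): write a file through one stream, read it back through another.
import anyio
from anyio.streams.file import FileReadStream, FileWriteStream

async def main() -> None:
    async with await FileWriteStream.from_path("/tmp/anyio-demo.bin") as sink:
        await sink.send(b"some bytes")
    async with await FileReadStream.from_path("/tmp/anyio-demo.bin") as source:
        assert await source.receive() == b"some bytes"

anyio.run(main)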
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/memory.py b/myenv/lib/python3.12/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..6840e62 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/memory.py @@ -0,0 +1,311 @@ +from __future__ import annotations + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: MemoryObjectStreamState[T_co] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. 
+ + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + receiver = MemoryObjectItemReceiver[T_co]() + self._state.waiting_receivers[receive_event] = receiver + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + try: + return receiver.item + except AttributeError: + raise EndOfStream + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__}>", + ResourceWarning, + source=self, + ) + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. 
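# A sketch of the memory object stream pair in action, using the non-blocking
# methods defined in these classes (assuming the anyio package is importable):
import anyio

async def main() -> None:
    send, receive = anyio.create_memory_object_stream[str](max_buffer_size=1)
    with send, receive:  # both ends double as synchronous context managers
        send.send_nowait("hello")  # fits in the buffer, so no waiting needed
        assert receive.receive_nowait() == "hello"

anyio.run(main)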
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__}>", + ResourceWarning, + source=self, + ) diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/stapled.py b/myenv/lib/python3.12/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..80f64a2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/stapled.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. 
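# A sketch of StapledObjectStream: two one-way memory streams stapled into a
# bidirectional stream that, in this toy setup, loops back on itself.
import anyio
from anyio.streams.stapled import StapledObjectStream

async def main() -> None:
    send, receive = anyio.create_memory_object_stream[str](max_buffer_size=1)
    stream = StapledObjectStream(send, receive)
    await stream.send("ping")
    assert await stream.receive() == "ping"
    await stream.aclose()

anyio.run(main)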
+ + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/text.py b/myenv/lib/python3.12/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..f1a1127 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/text.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +import codecs +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. 
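# A sketch of the incremental decoding described above: a multibyte character
# split across two chunks is only emitted once it is complete.
import anyio
from anyio.streams.text import TextReceiveStream

async def main() -> None:
    send, receive = anyio.create_memory_object_stream[bytes](max_buffer_size=2)
    text = TextReceiveStream(receive)
    payload = "héllo".encode("utf-8")
    await send.send(payload[:2])  # b"h\xc3" ends in the middle of "é"
    await send.send(payload[2:])
    assert await text.receive() == "h"     # the partial "é" is held back
    assert await text.receive() == "éllo"  # completed by the next chunk

anyio.run(main)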
+ + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/myenv/lib/python3.12/site-packages/anyio/streams/tls.py b/myenv/lib/python3.12/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..e913eed --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/streams/tls.py @@ -0,0 +1,338 @@ +from __future__ import annotations + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from typing import Any, Tuple, TypeVar + +from .. 
import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT = Tuple[Tuple[str, str], ...] +_PCTRTTT = Tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``True`` otherwise). Used only to create + a default context when an explicit context has not been provided. 
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if ( + isinstance(exc, ssl.SSLEOFError) + or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
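# A client-side sketch of TLSStream.wrap() (the host name is illustrative and
# a reachable server is assumed, so this is not a self-contained test):
import anyio
from anyio.streams.tls import TLSAttribute, TLSStream

async def main() -> None:
    tcp = await anyio.connect_tcp("example.com", 443)
    tls = await TLSStream.wrap(tcp, hostname="example.com")  # client side
    print(tls.extra(TLSAttribute.tls_version))  # e.g. "TLSv1.3"
    await tls.aclose()

anyio.run(main)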
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/myenv/lib/python3.12/site-packages/anyio/to_process.py b/myenv/lib/python3.12/site-packages/anyio/to_process.py new file mode 100644 index 0000000..1ff06f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/to_process.py @@ -0,0 +1,259 @@ +from __future__ import annotations + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. 
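# A sketch of to_process.run_sync(): the function must be picklable and defined
# at module level, and the __main__ guard matters because worker processes
# re-import the main module.
import anyio
from anyio import to_process

def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

async def main() -> None:
    print(await to_process.run_sync(fib, 30))

if __name__ == "__main__":
    anyio.run(main)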
+ + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled 
= pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/myenv/lib/python3.12/site-packages/anyio/to_thread.py b/myenv/lib/python3.12/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..5070516 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/anyio/to_thread.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + 
""" + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. + + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. + + :return: a capacity limiter object + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/myenv/lib/python3.12/site-packages/authlib/__init__.py b/myenv/lib/python3.12/site-packages/authlib/__init__.py new file mode 100644 index 0000000..2a2e5ad --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/__init__.py @@ -0,0 +1,17 @@ +""" + authlib + ~~~~~~~ + + The ultimate Python library in building OAuth 1.0, OAuth 2.0 and OpenID + Connect clients and providers. It covers from low level specification + implementation to high level framework integrations. + + :copyright: (c) 2017 by Hsiaoming Yang. + :license: BSD, see LICENSE for more details. 
+""" +from .consts import version, homepage, author + +__version__ = version +__homepage__ = homepage +__author__ = author +__license__ = 'BSD-3-Clause' diff --git a/myenv/lib/python3.12/site-packages/authlib/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..44e2cd1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/__pycache__/consts.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/__pycache__/consts.cpython-312.pyc new file mode 100644 index 0000000..8579086 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/__pycache__/consts.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/__pycache__/deprecate.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/__pycache__/deprecate.cpython-312.pyc new file mode 100644 index 0000000..ddbf48f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/__pycache__/deprecate.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__init__.py b/myenv/lib/python3.12/site-packages/authlib/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..95805c0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/encoding.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/encoding.cpython-312.pyc new file mode 100644 index 0000000..89b9e43 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/encoding.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..82ee12e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/security.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/security.cpython-312.pyc new file mode 100644 index 0000000..5ad1e4b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/security.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/urls.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/urls.cpython-312.pyc new file mode 100644 index 0000000..2090b0c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/common/__pycache__/urls.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/common/encoding.py b/myenv/lib/python3.12/site-packages/authlib/common/encoding.py new file mode 100644 index 0000000..f450ca4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/common/encoding.py @@ -0,0 +1,66 @@ +import json +import base64 +import struct + + +def to_bytes(x, charset='utf-8', errors='strict'): + if x is None: + return None + if isinstance(x, bytes): + return x + if 
isinstance(x, str): + return x.encode(charset, errors) + if isinstance(x, (int, float)): + return str(x).encode(charset, errors) + return bytes(x) + + +def to_unicode(x, charset='utf-8', errors='strict'): + if x is None or isinstance(x, str): + return x + if isinstance(x, bytes): + return x.decode(charset, errors) + return str(x) + + +def to_native(x, encoding='ascii'): + if isinstance(x, str): + return x + return x.decode(encoding) + + +def json_loads(s): + return json.loads(s) + + +def json_dumps(data, ensure_ascii=False): + return json.dumps(data, ensure_ascii=ensure_ascii, separators=(',', ':')) + + +def urlsafe_b64decode(s): + s += b'=' * (-len(s) % 4) + return base64.urlsafe_b64decode(s) + + +def urlsafe_b64encode(s): + return base64.urlsafe_b64encode(s).rstrip(b'=') + + +def base64_to_int(s): + data = urlsafe_b64decode(to_bytes(s, charset='ascii')) + buf = struct.unpack('%sB' % len(data), data) + return int(''.join(["%02x" % byte for byte in buf]), 16) + + +def int_to_base64(num): + if num < 0: + raise ValueError('Must be a positive integer') + + s = num.to_bytes((num.bit_length() + 7) // 8, 'big', signed=False) + return to_unicode(urlsafe_b64encode(s)) + + +def json_b64encode(text): + if isinstance(text, dict): + text = json_dumps(text) + return urlsafe_b64encode(to_bytes(text)) diff --git a/myenv/lib/python3.12/site-packages/authlib/common/errors.py b/myenv/lib/python3.12/site-packages/authlib/common/errors.py new file mode 100644 index 0000000..56515ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/common/errors.py @@ -0,0 +1,63 @@ +from authlib.consts import default_json_headers + + +class AuthlibBaseError(Exception): + """Base Exception for all errors in Authlib.""" + + #: short-string error code + error = None + #: long-string to describe this error + description = '' + #: web page that describes this error + uri = None + + def __init__(self, error=None, description=None, uri=None): + if error is not None: + self.error = error + if description is not None: + self.description = description + if uri is not None: + self.uri = uri + + message = f'{self.error}: {self.description}' + super().__init__(message) + + def __repr__(self): + return f'<{self.__class__.__name__} "{self.error}">' + + +class AuthlibHTTPError(AuthlibBaseError): + #: HTTP status code + status_code = 400 + + def __init__(self, error=None, description=None, uri=None, + status_code=None): + super().__init__(error, description, uri) + if status_code is not None: + self.status_code = status_code + + def get_error_description(self): + return self.description + + def get_body(self): + error = [('error', self.error)] + + if self.description: + error.append(('error_description', self.description)) + + if self.uri: + error.append(('error_uri', self.uri)) + return error + + def get_headers(self): + return default_json_headers[:] + + def __call__(self, uri=None): + self.uri = uri + body = dict(self.get_body()) + headers = self.get_headers() + return self.status_code, body, headers + + +class ContinueIteration(AuthlibBaseError): + pass diff --git a/myenv/lib/python3.12/site-packages/authlib/common/security.py b/myenv/lib/python3.12/site-packages/authlib/common/security.py new file mode 100644 index 0000000..b05ea14 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/common/security.py @@ -0,0 +1,19 @@ +import os +import string +import random + +UNICODE_ASCII_CHARACTER_SET = string.ascii_letters + string.digits + + +def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): + rand = 
random.SystemRandom() + return ''.join(rand.choice(chars) for _ in range(length)) + + +def is_secure_transport(uri): + """Check if the uri is over ssl.""" + if os.getenv('AUTHLIB_INSECURE_TRANSPORT'): + return True + + uri = uri.lower() + return uri.startswith(('https://', 'http://localhost:')) diff --git a/myenv/lib/python3.12/site-packages/authlib/common/urls.py b/myenv/lib/python3.12/site-packages/authlib/common/urls.py new file mode 100644 index 0000000..1d1847f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/common/urls.py @@ -0,0 +1,146 @@ +""" + authlib.util.urls + ~~~~~~~~~~~~~~~~~ + + Wrapper functions for URL encoding and decoding. +""" + +import re +from urllib.parse import quote as _quote +from urllib.parse import unquote as _unquote +from urllib.parse import urlencode as _urlencode +import urllib.parse as urlparse + +from .encoding import to_unicode, to_bytes + +always_safe = ( + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789_.-' +) +urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?') +INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]') + + +def url_encode(params): + encoded = [] + for k, v in params: + encoded.append((to_bytes(k), to_bytes(v))) + return to_unicode(_urlencode(encoded)) + + +def url_decode(query): + """Decode a query string in x-www-form-urlencoded format into a sequence + of two-element tuples. + + Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce + correct formatting of the query string by validation. If validation fails + a ValueError will be raised. urllib.parse_qsl will only raise errors if + any of name-value pairs omits the equals sign. + """ + # Check if query contains invalid characters + if query and not set(query) <= urlencoded: + error = ("Error trying to decode a non urlencoded string. " + "Found invalid characters: %s " + "in the string: '%s'. " + "Please ensure the request/response body is " + "x-www-form-urlencoded.") + raise ValueError(error % (set(query) - urlencoded, query)) + + # Check for correctly hex encoded values using a regular expression + # All encoded values begin with % followed by two hex characters + # correct = %00, %A0, %0A, %FF + # invalid = %G0, %5H, %PO + if INVALID_HEX_PATTERN.search(query): + raise ValueError('Invalid hex encoding in query string.') + + # We encode to utf-8 prior to parsing because parse_qsl behaves + # differently on unicode input in python 2 and 3. + # Python 2.7 + # >>> urlparse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\xe5\x95\xa6\xe5\x95\xa6' + # Python 2.7, non unicode input gives the same + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6') + # '\xe5\x95\xa6\xe5\x95\xa6' + # but now we can decode it to unicode + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6').decode('utf-8') + # u'\u5566\u5566' + # Python 3.3 however + # >>> urllib.parse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\u5566\u5566' + + # We want to allow queries such as "c2" whereas urlparse.parse_qsl + # with the strict_parsing flag will not. 
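+ # Illustrative examples (editor's sketch):
+ #   url_decode('a=1&b=%20c')  ->  [('a', '1'), ('b', ' c')]
+ #   url_decode('a=%GG')       ->  raises ValueError (invalid hex escape)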
+ params = urlparse.parse_qsl(query, keep_blank_values=True) + + # unicode all the things + decoded = [] + for k, v in params: + decoded.append((to_unicode(k), to_unicode(v))) + return decoded + + +def add_params_to_qs(query, params): + """Extend a query with a list of two-tuples.""" + if isinstance(params, dict): + params = params.items() + + qs = urlparse.parse_qsl(query, keep_blank_values=True) + qs.extend(params) + return url_encode(qs) + + +def add_params_to_uri(uri, params, fragment=False): + """Add a list of two-tuples to the uri query components.""" + sch, net, path, par, query, fra = urlparse.urlparse(uri) + if fragment: + fra = add_params_to_qs(fra, params) + else: + query = add_params_to_qs(query, params) + return urlparse.urlunparse((sch, net, path, par, query, fra)) + + +def quote(s, safe=b'/'): + return to_unicode(_quote(to_bytes(s), safe)) + + +def unquote(s): + return to_unicode(_unquote(s)) + + +def quote_url(s): + return quote(s, b'~@#$&()*!+=:;,.?/\'') + + +def extract_params(raw): + """Extract parameters and return them as a list of 2-tuples. + + Will successfully extract parameters from urlencoded query strings, + dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an + empty list of parameters. Any other input will result in a return + value of None. + """ + if isinstance(raw, (list, tuple)): + try: + raw = dict(raw) + except (TypeError, ValueError): + return None + + if isinstance(raw, dict): + params = [] + for k, v in raw.items(): + params.append((to_unicode(k), to_unicode(v))) + return params + + if not raw: + return None + + try: + return url_decode(raw) + except ValueError: + return None + + +def is_valid_url(url): + parsed = urlparse.urlparse(url) + return parsed.scheme and parsed.hostname diff --git a/myenv/lib/python3.12/site-packages/authlib/consts.py b/myenv/lib/python3.12/site-packages/authlib/consts.py new file mode 100644 index 0000000..157a2de --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/consts.py @@ -0,0 +1,11 @@ +name = 'Authlib' +version = '1.3.2' +author = 'Hsiaoming Yang ' +homepage = 'https://authlib.org/' +default_user_agent = f'{name}/{version} (+{homepage})' + +default_json_headers = [ + ('Content-Type', 'application/json'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), +] diff --git a/myenv/lib/python3.12/site-packages/authlib/deprecate.py b/myenv/lib/python3.12/site-packages/authlib/deprecate.py new file mode 100644 index 0000000..7d581d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/deprecate.py @@ -0,0 +1,16 @@ +import warnings + + +class AuthlibDeprecationWarning(DeprecationWarning): + pass + + +warnings.simplefilter('always', AuthlibDeprecationWarning) + + +def deprecate(message, version=None, link_uid=None, link_file=None): + if version: + message += f'\nIt will be compatible before version {version}.' 
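+ # Illustrative call (editor's sketch; the message text is arbitrary):
+ # deprecate('Use jose instead', '1.0') emits an AuthlibDeprecationWarning
+ # with the version hint appended to the message.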
+ if link_uid and link_file: + message += f'\nRead more ' + warnings.warn(AuthlibDeprecationWarning(message), stacklevel=2) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8bffa93 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__init__.py new file mode 100644 index 0000000..077301f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__init__.py @@ -0,0 +1,18 @@ +from .registry import BaseOAuth +from .sync_app import BaseApp, OAuth1Mixin, OAuth2Mixin +from .sync_openid import OpenIDMixin +from .framework_integration import FrameworkIntegration +from .errors import ( + OAuthError, MissingRequestTokenError, MissingTokenError, + TokenExpiredError, InvalidTokenError, UnsupportedTokenTypeError, + MismatchingStateError, +) + +__all__ = [ + 'BaseOAuth', + 'BaseApp', 'OAuth1Mixin', 'OAuth2Mixin', + 'OpenIDMixin', 'FrameworkIntegration', + 'OAuthError', 'MissingRequestTokenError', 'MissingTokenError', + 'TokenExpiredError', 'InvalidTokenError', 'UnsupportedTokenTypeError', + 'MismatchingStateError', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8ce2357 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_app.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_app.cpython-312.pyc new file mode 100644 index 0000000..eda9439 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_app.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_openid.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_openid.cpython-312.pyc new file mode 100644 index 0000000..9375912 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/async_openid.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..24488d3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/framework_integration.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/framework_integration.cpython-312.pyc new file mode 100644 index 0000000..a3bc639 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/framework_integration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/registry.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/registry.cpython-312.pyc new file mode 100644 index 0000000..8ca8088 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/registry.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_app.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_app.cpython-312.pyc new file mode 100644 index 0000000..1b95c42 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_app.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_openid.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_openid.cpython-312.pyc new file mode 100644 index 0000000..017d459 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/__pycache__/sync_openid.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_app.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_app.py new file mode 100644 index 0000000..640896e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_app.py @@ -0,0 +1,144 @@ +import time +import logging +from authlib.common.urls import urlparse +from .errors import ( + MissingRequestTokenError, + MissingTokenError, +) +from .sync_app import OAuth1Base, OAuth2Base + +log = logging.getLogger(__name__) + +__all__ = ['AsyncOAuth1Mixin', 'AsyncOAuth2Mixin'] + + +class AsyncOAuth1Mixin(OAuth1Base): + async def request(self, method, url, token=None, **kwargs): + async with self._get_oauth_client() as session: + return await _http_request(self, session, method, url, token, kwargs) + + async def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: dict + """ + if not self.authorize_url: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + async with self._get_oauth_client() as client: + client.redirect_uri = redirect_uri + params = {} + if self.request_token_params: + params.update(self.request_token_params) + request_token = await client.fetch_request_token(self.request_token_url, **params) + log.debug(f'Fetch request token: {request_token!r}') + url = client.create_authorization_url(self.authorize_url, **kwargs) + state = request_token['oauth_token'] + return {'url': url, 'request_token': request_token, 'state': state} + + async def fetch_access_token(self, request_token=None, **kwargs): + """Fetch access token in one step. + + :param request_token: A previous request token for OAuth 1. 
+ :param kwargs: Extra parameters to fetch access token. + :return: A token dict. + """ + async with self._get_oauth_client() as client: + if request_token is None: + raise MissingRequestTokenError() + # merge request token with verifier + token = {} + token.update(request_token) + token.update(kwargs) + client.token = token + params = self.access_token_params or {} + token = await client.fetch_access_token(self.access_token_url, **params) + return token + + +class AsyncOAuth2Mixin(OAuth2Base): + async def _on_update_token(self, token, refresh_token=None, access_token=None): + if self._update_token: + await self._update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + + async def load_server_metadata(self): + if self._server_metadata_url and '_loaded_at' not in self.server_metadata: + async with self.client_cls(**self.client_kwargs) as client: + resp = await client.request('GET', self._server_metadata_url, withhold_token=True) + resp.raise_for_status() + metadata = resp.json() + metadata['_loaded_at'] = time.time() + self.server_metadata.update(metadata) + return self.server_metadata + + async def request(self, method, url, token=None, **kwargs): + metadata = await self.load_server_metadata() + async with self._get_oauth_client(**metadata) as session: + return await _http_request(self, session, method, url, token, kwargs) + + async def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: dict + """ + metadata = await self.load_server_metadata() + authorization_endpoint = self.authorize_url or metadata.get('authorization_endpoint') + if not authorization_endpoint: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + async with self._get_oauth_client(**metadata) as client: + client.redirect_uri = redirect_uri + return self._create_oauth2_authorization_url( + client, authorization_endpoint, **kwargs) + + async def fetch_access_token(self, redirect_uri=None, **kwargs): + """Fetch access token in the final step. + + :param redirect_uri: Callback or Redirect URI that is used in + previous :meth:`authorize_redirect`. + :param kwargs: Extra parameters to fetch access token. + :return: A token dict. 
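+
+ Illustrative sketch (run inside an ``async`` handler; assumes a registered
+ ``oauth.google`` client and that ``code``/``state`` were taken from the
+ authorization callback)::
+
+     token = await oauth.google.fetch_access_token(
+         redirect_uri='https://example.com/auth',
+         code=code,
+         state=state,
+     )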
+ """ + metadata = await self.load_server_metadata() + token_endpoint = self.access_token_url or metadata.get('token_endpoint') + async with self._get_oauth_client(**metadata) as client: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + params = {} + if self.access_token_params: + params.update(self.access_token_params) + params.update(kwargs) + token = await client.fetch_token(token_endpoint, **params) + return token + + +async def _http_request(ctx, session, method, url, token, kwargs): + request = kwargs.pop('request', None) + withhold_token = kwargs.get('withhold_token') + if ctx.api_base_url and not url.startswith(('https://', 'http://')): + url = urlparse.urljoin(ctx.api_base_url, url) + + if withhold_token: + return await session.request(method, url, **kwargs) + + if token is None and ctx._fetch_token and request: + token = await ctx._fetch_token(request) + if token is None: + raise MissingTokenError() + + session.token = token + return await session.request(method, url, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_openid.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_openid.py new file mode 100644 index 0000000..68100f2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/async_openid.py @@ -0,0 +1,79 @@ +from authlib.jose import JsonWebToken, JsonWebKey +from authlib.oidc.core import UserInfo, CodeIDToken, ImplicitIDToken + +__all__ = ['AsyncOpenIDMixin'] + + +class AsyncOpenIDMixin: + async def fetch_jwk_set(self, force=False): + metadata = await self.load_server_metadata() + jwk_set = metadata.get('jwks') + if jwk_set and not force: + return jwk_set + + uri = metadata.get('jwks_uri') + if not uri: + raise RuntimeError('Missing "jwks_uri" in metadata') + + async with self.client_cls(**self.client_kwargs) as client: + resp = await client.request('GET', uri, withhold_token=True) + resp.raise_for_status() + jwk_set = resp.json() + + self.server_metadata['jwks'] = jwk_set + return jwk_set + + async def userinfo(self, **kwargs): + """Fetch user info from ``userinfo_endpoint``.""" + metadata = await self.load_server_metadata() + resp = await self.get(metadata['userinfo_endpoint'], **kwargs) + resp.raise_for_status() + data = resp.json() + return UserInfo(data) + + async def parse_id_token(self, token, nonce, claims_options=None): + """Return an instance of UserInfo from token's ``id_token``.""" + claims_params = dict( + nonce=nonce, + client_id=self.client_id, + ) + if 'access_token' in token: + claims_params['access_token'] = token['access_token'] + claims_cls = CodeIDToken + else: + claims_cls = ImplicitIDToken + + metadata = await self.load_server_metadata() + if claims_options is None and 'issuer' in metadata: + claims_options = {'iss': {'values': [metadata['issuer']]}} + + alg_values = metadata.get('id_token_signing_alg_values_supported') + if not alg_values: + alg_values = ['RS256'] + + jwt = JsonWebToken(alg_values) + + jwk_set = await self.fetch_jwk_set() + try: + claims = jwt.decode( + token['id_token'], + key=JsonWebKey.import_key_set(jwk_set), + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + except ValueError: + jwk_set = await self.fetch_jwk_set(force=True) + claims = jwt.decode( + token['id_token'], + key=JsonWebKey.import_key_set(jwk_set), + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + + # https://github.com/lepture/authlib/issues/259 + if 
claims.get('nonce_supported') is False: + claims.params['nonce'] = None + claims.validate(leeway=120) + return UserInfo(claims) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/errors.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/errors.py new file mode 100644 index 0000000..bb4dd2b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/errors.py @@ -0,0 +1,30 @@ +from authlib.common.errors import AuthlibBaseError + + +class OAuthError(AuthlibBaseError): + error = 'oauth_error' + + +class MissingRequestTokenError(OAuthError): + error = 'missing_request_token' + + +class MissingTokenError(OAuthError): + error = 'missing_token' + + +class TokenExpiredError(OAuthError): + error = 'token_expired' + + +class InvalidTokenError(OAuthError): + error = 'token_invalid' + + +class UnsupportedTokenTypeError(OAuthError): + error = 'unsupported_token_type' + + +class MismatchingStateError(OAuthError): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/framework_integration.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/framework_integration.py new file mode 100644 index 0000000..9243e8f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/framework_integration.py @@ -0,0 +1,64 @@ +import json +import time + + +class FrameworkIntegration: + expires_in = 3600 + + def __init__(self, name, cache=None): + self.name = name + self.cache = cache + + def _get_cache_data(self, key): + value = self.cache.get(key) + if not value: + return None + try: + return json.loads(value) + except (TypeError, ValueError): + return None + + def _clear_session_state(self, session): + now = time.time() + for key in dict(session): + if '_authlib_' in key: + # TODO: remove in future + session.pop(key) + elif key.startswith('_state_'): + value = session[key] + exp = value.get('exp') + if not exp or exp < now: + session.pop(key) + + def get_state_data(self, session, state): + key = f'_state_{self.name}_{state}' + if self.cache: + value = self._get_cache_data(key) + else: + value = session.get(key) + if value: + return value.get('data') + return None + + def set_state_data(self, session, state, data): + key = f'_state_{self.name}_{state}' + if self.cache: + self.cache.set(key, json.dumps({'data': data}), self.expires_in) + else: + now = time.time() + session[key] = {'data': data, 'exp': now + self.expires_in} + + def clear_state_data(self, session, state): + key = f'_state_{self.name}_{state}' + if self.cache: + self.cache.delete(key) + else: + session.pop(key, None) + self._clear_session_state(session) + + def update_token(self, token, refresh_token=None, access_token=None): + raise NotImplementedError() + + @staticmethod + def load_config(oauth, name, params): + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/registry.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/registry.py new file mode 100644 index 0000000..68d1be5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/registry.py @@ -0,0 +1,131 @@ +import functools +from .framework_integration import FrameworkIntegration + +__all__ = ['BaseOAuth'] + + +OAUTH_CLIENT_PARAMS = ( + 'client_id', 'client_secret', + 'request_token_url', 'request_token_params', + 'access_token_url', 
'access_token_params', + 'refresh_token_url', 'refresh_token_params', + 'authorize_url', 'authorize_params', + 'api_base_url', 'client_kwargs', + 'server_metadata_url', +) + + +class BaseOAuth: + """Registry for oauth clients. + + Create an instance for registry:: + + oauth = OAuth() + """ + oauth1_client_cls = None + oauth2_client_cls = None + framework_integration_cls = FrameworkIntegration + + def __init__(self, cache=None, fetch_token=None, update_token=None): + self._registry = {} + self._clients = {} + self.cache = cache + self.fetch_token = fetch_token + self.update_token = update_token + + def create_client(self, name): + """Create or get the given named OAuth client. For instance, the + OAuth registry has ``.register`` a twitter client, developers may + access the client with:: + + client = oauth.create_client('twitter') + + :param: name: Name of the remote application + :return: OAuth remote app + """ + if name in self._clients: + return self._clients[name] + + if name not in self._registry: + return None + + overwrite, config = self._registry[name] + client_cls = config.pop('client_cls', None) + + if client_cls and client_cls.OAUTH_APP_CONFIG: + kwargs = client_cls.OAUTH_APP_CONFIG + kwargs.update(config) + else: + kwargs = config + + kwargs = self.generate_client_kwargs(name, overwrite, **kwargs) + framework = self.framework_integration_cls(name, self.cache) + if client_cls: + client = client_cls(framework, name, **kwargs) + elif kwargs.get('request_token_url'): + client = self.oauth1_client_cls(framework, name, **kwargs) + else: + client = self.oauth2_client_cls(framework, name, **kwargs) + + self._clients[name] = client + return client + + def register(self, name, overwrite=False, **kwargs): + """Registers a new remote application. + + :param name: Name of the remote application. + :param overwrite: Overwrite existing config with framework settings. + :param kwargs: Parameters for :class:`RemoteApp`. + + Find parameters for the given remote app class. When a remote app is + registered, it can be accessed with *named* attribute:: + + oauth.register('twitter', client_id='', ...) 
+ oauth.twitter.get('timeline') + """ + self._registry[name] = (overwrite, kwargs) + return self.create_client(name) + + def generate_client_kwargs(self, name, overwrite, **kwargs): + fetch_token = kwargs.pop('fetch_token', None) + update_token = kwargs.pop('update_token', None) + + config = self.load_config(name, OAUTH_CLIENT_PARAMS) + if config: + kwargs = _config_client(config, kwargs, overwrite) + + if not fetch_token and self.fetch_token: + fetch_token = functools.partial(self.fetch_token, name) + + kwargs['fetch_token'] = fetch_token + + if not kwargs.get('request_token_url'): + if not update_token and self.update_token: + update_token = functools.partial(self.update_token, name) + + kwargs['update_token'] = update_token + return kwargs + + def load_config(self, name, params): + return self.framework_integration_cls.load_config(self, name, params) + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + if key in self._registry: + return self.create_client(key) + raise AttributeError('No such client: %s' % key) + + +def _config_client(config, kwargs, overwrite): + for k in OAUTH_CLIENT_PARAMS: + v = config.get(k, None) + if k not in kwargs: + kwargs[k] = v + elif overwrite and v: + if isinstance(kwargs[k], dict): + kwargs[k].update(v) + else: + kwargs[k] = v + return kwargs diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_app.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_app.py new file mode 100644 index 0000000..c676370 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_app.py @@ -0,0 +1,348 @@ +import time +import logging +from authlib.common.urls import urlparse +from authlib.consts import default_user_agent +from authlib.common.security import generate_token +from .errors import ( + MismatchingStateError, + MissingRequestTokenError, + MissingTokenError, +) + +log = logging.getLogger(__name__) + + +class BaseApp: + client_cls = None + OAUTH_APP_CONFIG = None + + def request(self, method, url, token=None, **kwargs): + raise NotImplementedError() + + def get(self, url, **kwargs): + """Invoke GET http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.get('users/lepture') + """ + return self.request('GET', url, **kwargs) + + def post(self, url, **kwargs): + """Invoke POST http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.post('timeline', json={'text': 'Hi'}) + """ + return self.request('POST', url, **kwargs) + + def patch(self, url, **kwargs): + """Invoke PATCH http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.patch('profile', json={'name': 'Hsiaoming Yang'}) + """ + return self.request('PATCH', url, **kwargs) + + def put(self, url, **kwargs): + """Invoke PUT http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.put('profile', json={'name': 'Hsiaoming Yang'}) + """ + return self.request('PUT', url, **kwargs) + + def delete(self, url, **kwargs): + """Invoke DELETE http request. 
+ + If ``api_base_url`` configured, shortcut is available:: + + client.delete('posts/123') + """ + return self.request('DELETE', url, **kwargs) + + +class _RequestMixin: + def _get_requested_token(self, request): + if self._fetch_token and request: + return self._fetch_token(request) + + def _send_token_request(self, session, method, url, token, kwargs): + request = kwargs.pop('request', None) + withhold_token = kwargs.get('withhold_token') + if self.api_base_url and not url.startswith(('https://', 'http://')): + url = urlparse.urljoin(self.api_base_url, url) + + if withhold_token: + return session.request(method, url, **kwargs) + + if token is None: + token = self._get_requested_token(request) + + if token is None: + raise MissingTokenError() + + session.token = token + return session.request(method, url, **kwargs) + + +class OAuth1Base: + client_cls = None + + def __init__( + self, framework, name=None, fetch_token=None, + client_id=None, client_secret=None, + request_token_url=None, request_token_params=None, + access_token_url=None, access_token_params=None, + authorize_url=None, authorize_params=None, + api_base_url=None, client_kwargs=None, user_agent=None, **kwargs): + self.framework = framework + self.name = name + self.client_id = client_id + self.client_secret = client_secret + self.request_token_url = request_token_url + self.request_token_params = request_token_params + self.access_token_url = access_token_url + self.access_token_params = access_token_params + self.authorize_url = authorize_url + self.authorize_params = authorize_params + self.api_base_url = api_base_url + self.client_kwargs = client_kwargs or {} + + self._fetch_token = fetch_token + self._user_agent = user_agent or default_user_agent + self._kwargs = kwargs + + def _get_oauth_client(self): + session = self.client_cls(self.client_id, self.client_secret, **self.client_kwargs) + session.headers['User-Agent'] = self._user_agent + return session + + +class OAuth1Mixin(_RequestMixin, OAuth1Base): + def request(self, method, url, token=None, **kwargs): + with self._get_oauth_client() as session: + return self._send_token_request(session, method, url, token, kwargs) + + def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: dict + """ + if not self.authorize_url: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + with self._get_oauth_client() as client: + client.redirect_uri = redirect_uri + params = self.request_token_params or {} + request_token = client.fetch_request_token(self.request_token_url, **params) + log.debug(f'Fetch request token: {request_token!r}') + url = client.create_authorization_url(self.authorize_url, **kwargs) + state = request_token['oauth_token'] + return {'url': url, 'request_token': request_token, 'state': state} + + def fetch_access_token(self, request_token=None, **kwargs): + """Fetch access token in one step. + + :param request_token: A previous request token for OAuth 1. + :param kwargs: Extra parameters to fetch access token. + :return: A token dict. 
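+
+ Illustrative sketch (``request_token`` is the dict previously returned by
+ :meth:`create_authorization_url`; ``verifier`` comes from the callback and
+ is merged into the token before the exchange)::
+
+     token = client.fetch_access_token(request_token,
+                                       oauth_verifier=verifier)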
+ """ + with self._get_oauth_client() as client: + if request_token is None: + raise MissingRequestTokenError() + # merge request token with verifier + token = {} + token.update(request_token) + token.update(kwargs) + client.token = token + params = self.access_token_params or {} + token = client.fetch_access_token(self.access_token_url, **params) + return token + + +class OAuth2Base: + client_cls = None + + def __init__( + self, framework, name=None, fetch_token=None, update_token=None, + client_id=None, client_secret=None, + access_token_url=None, access_token_params=None, + authorize_url=None, authorize_params=None, + api_base_url=None, client_kwargs=None, server_metadata_url=None, + compliance_fix=None, client_auth_methods=None, user_agent=None, **kwargs): + self.framework = framework + self.name = name + self.client_id = client_id + self.client_secret = client_secret + self.access_token_url = access_token_url + self.access_token_params = access_token_params + self.authorize_url = authorize_url + self.authorize_params = authorize_params + self.api_base_url = api_base_url + self.client_kwargs = client_kwargs or {} + + self.compliance_fix = compliance_fix + self.client_auth_methods = client_auth_methods + self._fetch_token = fetch_token + self._update_token = update_token + self._user_agent = user_agent or default_user_agent + + self._server_metadata_url = server_metadata_url + self.server_metadata = kwargs + + def _on_update_token(self, token, refresh_token=None, access_token=None): + raise NotImplementedError() + + def _get_oauth_client(self, **metadata): + client_kwargs = {} + client_kwargs.update(self.client_kwargs) + client_kwargs.update(metadata) + + if self.authorize_url: + client_kwargs['authorization_endpoint'] = self.authorize_url + if self.access_token_url: + client_kwargs['token_endpoint'] = self.access_token_url + + session = self.client_cls( + client_id=self.client_id, + client_secret=self.client_secret, + update_token=self._on_update_token, + **client_kwargs + ) + if self.client_auth_methods: + for f in self.client_auth_methods: + session.register_client_auth_method(f) + + if self.compliance_fix: + self.compliance_fix(session) + + session.headers['User-Agent'] = self._user_agent + return session + + @staticmethod + def _format_state_params(state_data, params): + if state_data is None: + raise MismatchingStateError() + + code_verifier = state_data.get('code_verifier') + if code_verifier: + params['code_verifier'] = code_verifier + + redirect_uri = state_data.get('redirect_uri') + if redirect_uri: + params['redirect_uri'] = redirect_uri + return params + + @staticmethod + def _create_oauth2_authorization_url(client, authorization_endpoint, **kwargs): + rv = {} + if client.code_challenge_method: + code_verifier = kwargs.get('code_verifier') + if not code_verifier: + code_verifier = generate_token(48) + kwargs['code_verifier'] = code_verifier + rv['code_verifier'] = code_verifier + log.debug(f'Using code_verifier: {code_verifier!r}') + + scope = kwargs.get('scope', client.scope) + scope = ( + (scope if isinstance(scope, (list, tuple)) else scope.split()) + if scope + else None + ) + if scope and "openid" in scope: + # this is an OpenID Connect service + nonce = kwargs.get('nonce') + if not nonce: + nonce = generate_token(20) + kwargs['nonce'] = nonce + rv['nonce'] = nonce + + url, state = client.create_authorization_url( + authorization_endpoint, **kwargs) + rv['url'] = url + rv['state'] = state + return rv + + +class OAuth2Mixin(_RequestMixin, OAuth2Base): + def 
_on_update_token(self, token, refresh_token=None, access_token=None): + if callable(self._update_token): + self._update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + self.framework.update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + + def request(self, method, url, token=None, **kwargs): + metadata = self.load_server_metadata() + with self._get_oauth_client(**metadata) as session: + return self._send_token_request(session, method, url, token, kwargs) + + def load_server_metadata(self): + if self._server_metadata_url and '_loaded_at' not in self.server_metadata: + with self.client_cls(**self.client_kwargs) as session: + resp = session.request('GET', self._server_metadata_url, withhold_token=True) + resp.raise_for_status() + metadata = resp.json() + + metadata['_loaded_at'] = time.time() + self.server_metadata.update(metadata) + return self.server_metadata + + def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: dict + """ + metadata = self.load_server_metadata() + authorization_endpoint = self.authorize_url or metadata.get('authorization_endpoint') + + if not authorization_endpoint: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + + with self._get_oauth_client(**metadata) as client: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + return self._create_oauth2_authorization_url( + client, authorization_endpoint, **kwargs) + + def fetch_access_token(self, redirect_uri=None, **kwargs): + """Fetch access token in the final step. + + :param redirect_uri: Callback or Redirect URI that is used in + previous :meth:`authorize_redirect`. + :param kwargs: Extra parameters to fetch access token. + :return: A token dict. 
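+
+ Illustrative sketch (the synchronous counterpart of the async mixin above;
+ ``oauth.google`` and the callback's ``code``/``state`` are assumed)::
+
+     token = oauth.google.fetch_access_token(
+         redirect_uri='https://example.com/auth', code=code, state=state)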
+ """ + metadata = self.load_server_metadata() + token_endpoint = self.access_token_url or metadata.get('token_endpoint') + with self._get_oauth_client(**metadata) as client: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + params = {} + if self.access_token_params: + params.update(self.access_token_params) + params.update(kwargs) + token = client.fetch_token(token_endpoint, **params) + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_openid.py b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_openid.py new file mode 100644 index 0000000..1611e24 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/base_client/sync_openid.py @@ -0,0 +1,82 @@ +from authlib.jose import jwt, JsonWebToken, JsonWebKey +from authlib.oidc.core import UserInfo, CodeIDToken, ImplicitIDToken + + +class OpenIDMixin: + def fetch_jwk_set(self, force=False): + metadata = self.load_server_metadata() + jwk_set = metadata.get('jwks') + if jwk_set and not force: + return jwk_set + + uri = metadata.get('jwks_uri') + if not uri: + raise RuntimeError('Missing "jwks_uri" in metadata') + + with self.client_cls(**self.client_kwargs) as session: + resp = session.request('GET', uri, withhold_token=True) + resp.raise_for_status() + jwk_set = resp.json() + + self.server_metadata['jwks'] = jwk_set + return jwk_set + + def userinfo(self, **kwargs): + """Fetch user info from ``userinfo_endpoint``.""" + metadata = self.load_server_metadata() + resp = self.get(metadata['userinfo_endpoint'], **kwargs) + resp.raise_for_status() + data = resp.json() + return UserInfo(data) + + def parse_id_token(self, token, nonce, claims_options=None, leeway=120): + """Return an instance of UserInfo from token's ``id_token``.""" + if 'id_token' not in token: + return None + + load_key = self.create_load_key() + + claims_params = dict( + nonce=nonce, + client_id=self.client_id, + ) + if 'access_token' in token: + claims_params['access_token'] = token['access_token'] + claims_cls = CodeIDToken + else: + claims_cls = ImplicitIDToken + + metadata = self.load_server_metadata() + if claims_options is None and 'issuer' in metadata: + claims_options = {'iss': {'values': [metadata['issuer']]}} + + alg_values = metadata.get('id_token_signing_alg_values_supported') + if alg_values: + _jwt = JsonWebToken(alg_values) + else: + _jwt = jwt + + claims = _jwt.decode( + token['id_token'], key=load_key, + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + # https://github.com/lepture/authlib/issues/259 + if claims.get('nonce_supported') is False: + claims.params['nonce'] = None + + claims.validate(leeway=leeway) + return UserInfo(claims) + + def create_load_key(self): + def load_key(header, _): + jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set()) + try: + return jwk_set.find_by_kid(header.get('kid')) + except ValueError: + # re-try with new jwk set + jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set(force=True)) + return jwk_set.find_by_kid(header.get('kid')) + + return load_key diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__init__.py new file mode 100644 index 0000000..5839c94 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__init__.py @@ -0,0 +1,19 @@ +# flake8: noqa + +from .integration import DjangoIntegration, token_update +from .apps 
import DjangoOAuth1App, DjangoOAuth2App +from ..base_client import BaseOAuth, OAuthError + + +class OAuth(BaseOAuth): + oauth1_client_cls = DjangoOAuth1App + oauth2_client_cls = DjangoOAuth2App + framework_integration_cls = DjangoIntegration + + +__all__ = [ + 'OAuth', + 'DjangoOAuth1App', 'DjangoOAuth2App', + 'DjangoIntegration', + 'token_update', 'OAuthError', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9414c67 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/apps.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/apps.cpython-312.pyc new file mode 100644 index 0000000..263e3a9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/apps.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/integration.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/integration.cpython-312.pyc new file mode 100644 index 0000000..975e344 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/__pycache__/integration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/apps.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/apps.py new file mode 100644 index 0000000..07bdf71 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/apps.py @@ -0,0 +1,87 @@ +from django.http import HttpResponseRedirect +from ..requests_client import OAuth1Session, OAuth2Session +from ..base_client import ( + BaseApp, OAuthError, + OAuth1Mixin, OAuth2Mixin, OpenIDMixin, +) + + +class DjangoAppMixin: + def save_authorize_data(self, request, **kwargs): + state = kwargs.pop('state', None) + if state: + self.framework.set_state_data(request.session, state, kwargs) + else: + raise RuntimeError('Missing state value') + + def authorize_redirect(self, request, redirect_uri=None, **kwargs): + """Create a HTTP Redirect for Authorization Endpoint. + + :param request: HTTP request instance from Django view. + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: A HTTP redirect response. + """ + rv = self.create_authorization_url(redirect_uri, **kwargs) + self.save_authorize_data(request, redirect_uri=redirect_uri, **rv) + return HttpResponseRedirect(rv['url']) + + +class DjangoOAuth1App(DjangoAppMixin, OAuth1Mixin, BaseApp): + client_cls = OAuth1Session + + def authorize_access_token(self, request, **kwargs): + """Fetch access token in one step. + + :param request: HTTP request instance from Django view. + :return: A token dict. 
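+
+ Illustrative sketch (a Django callback view; ``oauth.twitter`` is a
+ registered OAuth 1 client and ``redirect`` is ``django.shortcuts.redirect``)::
+
+     def auth(request):
+         token = oauth.twitter.authorize_access_token(request)
+         # persist the token for the current user here
+         return redirect('/')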
+ """ + params = request.GET.dict() + state = params.get('oauth_token') + if not state: + raise OAuthError(description='Missing "oauth_token" parameter') + + data = self.framework.get_state_data(request.session, state) + if not data: + raise OAuthError(description='Missing "request_token" in temporary data') + + params['request_token'] = data['request_token'] + params.update(kwargs) + self.framework.clear_state_data(request.session, state) + return self.fetch_access_token(**params) + + +class DjangoOAuth2App(DjangoAppMixin, OAuth2Mixin, OpenIDMixin, BaseApp): + client_cls = OAuth2Session + + def authorize_access_token(self, request, **kwargs): + """Fetch access token in one step. + + :param request: HTTP request instance from Django view. + :return: A token dict. + """ + if request.method == 'GET': + error = request.GET.get('error') + if error: + description = request.GET.get('error_description') + raise OAuthError(error=error, description=description) + params = { + 'code': request.GET.get('code'), + 'state': request.GET.get('state'), + } + else: + params = { + 'code': request.POST.get('code'), + 'state': request.POST.get('state'), + } + + claims_options = kwargs.pop('claims_options', None) + state_data = self.framework.get_state_data(request.session, params.get('state')) + self.framework.clear_state_data(request.session, params.get('state')) + params = self._format_state_params(state_data, params) + token = self.fetch_access_token(**params, **kwargs) + + if 'id_token' in token and 'nonce' in state_data: + userinfo = self.parse_id_token(token, nonce=state_data['nonce'], claims_options=claims_options) + token['userinfo'] = userinfo + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/integration.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/integration.py new file mode 100644 index 0000000..2ff03de --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_client/integration.py @@ -0,0 +1,22 @@ +from django.conf import settings +from django.dispatch import Signal +from ..base_client import FrameworkIntegration + +token_update = Signal() + + +class DjangoIntegration(FrameworkIntegration): + def update_token(self, token, refresh_token=None, access_token=None): + token_update.send( + sender=self.__class__, + name=self.name, + token=token, + refresh_token=refresh_token, + access_token=access_token, + ) + + @staticmethod + def load_config(oauth, name, params): + config = getattr(settings, 'AUTHLIB_OAUTH_CLIENTS', None) + if config: + return config.get(name) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__init__.py new file mode 100644 index 0000000..39f0e13 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +from .authorization_server import ( + BaseServer, CacheAuthorizationServer +) +from .resource_protector import ResourceProtector + + +__all__ = ['BaseServer', 'CacheAuthorizationServer', 'ResourceProtector'] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a81ca1f Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/authorization_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..c2300bb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/nonce.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/nonce.cpython-312.pyc new file mode 100644 index 0000000..e49bbac Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/nonce.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..018e981 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/authorization_server.py new file mode 100644 index 0000000..70c2b6b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/authorization_server.py @@ -0,0 +1,125 @@ +import logging +from authlib.oauth1 import ( + OAuth1Request, + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth1 import TemporaryCredential +from authlib.common.security import generate_token +from authlib.common.urls import url_encode +from django.core.cache import cache +from django.conf import settings +from django.http import HttpResponse +from .nonce import exists_nonce_in_cache + +log = logging.getLogger(__name__) + + +class BaseServer(_AuthorizationServer): + def __init__(self, client_model, token_model, token_generator=None): + self.client_model = client_model + self.token_model = token_model + + if token_generator is None: + def token_generator(): + return { + 'oauth_token': generate_token(42), + 'oauth_token_secret': generate_token(48) + } + + self.token_generator = token_generator + self._config = getattr(settings, 'AUTHLIB_OAUTH1_PROVIDER', {}) + self._nonce_expires_in = self._config.get('nonce_expires_in', 86400) + methods = self._config.get('signature_methods') + if methods: + self.SUPPORTED_SIGNATURE_METHODS = methods + + def get_client_by_id(self, client_id): + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def exists_nonce(self, nonce, request): + return exists_nonce_in_cache(nonce, request, self._nonce_expires_in) + + def create_token_credential(self, request): + temporary_credential = request.credential + token = self.token_generator() + item = self.token_model( + oauth_token=token['oauth_token'], + oauth_token_secret=token['oauth_token_secret'], + user_id=temporary_credential.get_user_id(), + client_id=temporary_credential.get_client_id() + ) + item.save() + return item + + def 
check_authorization_request(self, request): + req = self.create_oauth1_request(request) + self.validate_authorization_request(req) + return req + + def create_oauth1_request(self, request): + if request.method == 'POST': + body = request.POST.dict() + else: + body = None + url = request.build_absolute_uri() + return OAuth1Request(request.method, url, body, request.headers) + + def handle_response(self, status_code, payload, headers): + resp = HttpResponse(url_encode(payload), status=status_code) + for k, v in headers: + resp[k] = v + return resp + + +class CacheAuthorizationServer(BaseServer): + def __init__(self, client_model, token_model, token_generator=None): + super().__init__( + client_model, token_model, token_generator) + self._temporary_expires_in = self._config.get( + 'temporary_credential_expires_in', 86400) + self._temporary_credential_key_prefix = self._config.get( + 'temporary_credential_key_prefix', 'temporary_credential:') + + def create_temporary_credential(self, request): + key_prefix = self._temporary_credential_key_prefix + token = self.token_generator() + + client_id = request.client_id + redirect_uri = request.redirect_uri + key = key_prefix + token['oauth_token'] + token['client_id'] = client_id + if redirect_uri: + token['oauth_callback'] = redirect_uri + + cache.set(key, token, timeout=self._temporary_expires_in) + return TemporaryCredential(token) + + def get_temporary_credential(self, request): + if not request.token: + return None + + key_prefix = self._temporary_credential_key_prefix + key = key_prefix + request.token + value = cache.get(key) + if value: + return TemporaryCredential(value) + + def delete_temporary_credential(self, request): + if request.token: + key_prefix = self._temporary_credential_key_prefix + key = key_prefix + request.token + cache.delete(key) + + def create_authorization_verifier(self, request): + key_prefix = self._temporary_credential_key_prefix + verifier = generate_token(36) + credential = request.credential + user = request.user + key = key_prefix + credential.get_oauth_token() + credential['oauth_verifier'] = verifier + credential['user_id'] = user.pk + cache.set(key, credential, timeout=self._temporary_expires_in) + return verifier diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/nonce.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/nonce.py new file mode 100644 index 0000000..0bd70e3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/nonce.py @@ -0,0 +1,15 @@ +from django.core.cache import cache + + +def exists_nonce_in_cache(nonce, request, timeout): + key_prefix = 'nonce:' + timestamp = request.timestamp + client_id = request.client_id + token = request.token + key = f'{key_prefix}{nonce}-{timestamp}-{client_id}' + if token: + key = f'{key}-{token}' + + rv = bool(cache.get(key)) + cache.set(key, 1, timeout=timeout) + return rv diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/resource_protector.py new file mode 100644 index 0000000..77f3d81 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth1/resource_protector.py @@ -0,0 +1,64 @@ +import functools +from authlib.oauth1.errors import OAuth1Error +from authlib.oauth1 import ResourceProtector as _ResourceProtector +from django.http import JsonResponse +from django.conf import settings +from .nonce import 
exists_nonce_in_cache + + +class ResourceProtector(_ResourceProtector): + def __init__(self, client_model, token_model): + self.client_model = client_model + self.token_model = token_model + + config = getattr(settings, 'AUTHLIB_OAUTH1_PROVIDER', {}) + methods = config.get('signature_methods', []) + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self._nonce_expires_in = config.get('nonce_expires_in', 86400) + + def get_client_by_id(self, client_id): + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def get_token_credential(self, request): + try: + return self.token_model.objects.get( + client_id=request.client_id, + oauth_token=request.token + ) + except self.token_model.DoesNotExist: + return None + + def exists_nonce(self, nonce, request): + return exists_nonce_in_cache(nonce, request, self._nonce_expires_in) + + def acquire_credential(self, request): + if request.method in ['POST', 'PUT']: + body = request.POST.dict() + else: + body = None + + url = request.build_absolute_uri() + req = self.validate_request(request.method, url, body, request.headers) + return req.credential + + def __call__(self, realm=None): + def wrapper(f): + @functools.wraps(f) + def decorated(request, *args, **kwargs): + try: + credential = self.acquire_credential(request) + request.oauth1_credential = credential + except OAuth1Error as error: + body = dict(error.get_body()) + resp = JsonResponse(body, status=error.status_code) + resp['Cache-Control'] = 'no-store' + resp['Pragma'] = 'no-cache' + return resp + return f(request, *args, **kwargs) + return decorated + return wrapper diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__init__.py new file mode 100644 index 0000000..05c1fdf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__init__.py @@ -0,0 +1,10 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector, BearerTokenValidator +from .endpoints import RevocationEndpoint +from .signals import ( + client_authenticated, + token_authenticated, + token_revoked +) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6007556 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/authorization_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..c24702d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/endpoints.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/endpoints.cpython-312.pyc new file mode 100644 index 0000000..ccd51af Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/endpoints.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/requests.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/requests.cpython-312.pyc new file mode 100644 index 0000000..310824c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/requests.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..06def96 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/signals.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/signals.cpython-312.pyc new file mode 100644 index 0000000..91142c8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/__pycache__/signals.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/authorization_server.py new file mode 100644 index 0000000..08a2759 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/authorization_server.py @@ -0,0 +1,118 @@ +from django.http import HttpResponse +from django.utils.module_loading import import_string +from django.conf import settings +from authlib.oauth2 import ( + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth2.rfc6750 import BearerTokenGenerator +from authlib.common.security import generate_token as _generate_token +from authlib.common.encoding import json_dumps +from .requests import DjangoOAuth2Request, DjangoJsonRequest +from .signals import client_authenticated, token_revoked + + +class AuthorizationServer(_AuthorizationServer): + """Django implementation of :class:`authlib.oauth2.rfc6749.AuthorizationServer`. + Initialize it with client model and token model:: + + from authlib.integrations.django_oauth2 import AuthorizationServer + from your_project.models import OAuth2Client, OAuth2Token + + server = AuthorizationServer(OAuth2Client, OAuth2Token) + """ + + def __init__(self, client_model, token_model): + self.config = getattr(settings, 'AUTHLIB_OAUTH2_PROVIDER', {}) + self.client_model = client_model + self.token_model = token_model + scopes_supported = self.config.get('scopes_supported') + super().__init__(scopes_supported=scopes_supported) + # add default token generator + self.register_token_generator('default', self.create_bearer_token_generator()) + + def query_client(self, client_id): + """Default method for ``AuthorizationServer.query_client``. Developers MAY + rewrite this function to meet their own needs. + """ + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def save_token(self, token, request): + """Default method for ``AuthorizationServer.save_token``. Developers MAY + rewrite this function to meet their own needs. 
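# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the authlib source above): exposing a
# token endpoint with this server, following the model names used in the
# docstring. The view function, URL wiring, and grant registration are
# assumptions for illustration only.
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from authlib.integrations.django_oauth2 import AuthorizationServer
from myproject.models import OAuth2Client, OAuth2Token  # hypothetical models

server = AuthorizationServer(OAuth2Client, OAuth2Token)
# grant classes (e.g. an AuthorizationCodeGrant subclass) would be
# registered here with server.register_grant(...)

@csrf_exempt
@require_http_methods(["POST"])
def issue_token(request):
    # Wraps the Django request via create_oauth2_request() and returns an
    # HttpResponse built by handle_response().
    return server.create_token_response(request)
# ---------------------------------------------------------------------------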
+ """ + client = request.client + if request.user: + user_id = request.user.pk + else: + user_id = client.user_id + item = self.token_model( + client_id=client.client_id, + user_id=user_id, + **token + ) + item.save() + return item + + def create_oauth2_request(self, request): + return DjangoOAuth2Request(request) + + def create_json_request(self, request): + return DjangoJsonRequest(request) + + def handle_response(self, status_code, payload, headers): + if isinstance(payload, dict): + payload = json_dumps(payload) + resp = HttpResponse(payload, status=status_code) + for k, v in headers: + resp[k] = v + return resp + + def send_signal(self, name, *args, **kwargs): + if name == 'after_authenticate_client': + client_authenticated.send(sender=self.__class__, *args, **kwargs) + elif name == 'after_revoke_token': + token_revoked.send(sender=self.__class__, *args, **kwargs) + + def create_bearer_token_generator(self): + """Default method to create BearerToken generator.""" + conf = self.config.get('access_token_generator', True) + access_token_generator = create_token_generator(conf, 42) + + conf = self.config.get('refresh_token_generator', False) + refresh_token_generator = create_token_generator(conf, 48) + + conf = self.config.get('token_expires_in') + expires_generator = create_token_expires_in_generator(conf) + + return BearerTokenGenerator( + access_token_generator=access_token_generator, + refresh_token_generator=refresh_token_generator, + expires_generator=expires_generator, + ) + + +def create_token_generator(token_generator_conf, length=42): + if callable(token_generator_conf): + return token_generator_conf + + if isinstance(token_generator_conf, str): + return import_string(token_generator_conf) + elif token_generator_conf is True: + def token_generator(*args, **kwargs): + return _generate_token(length) + return token_generator + + +def create_token_expires_in_generator(expires_in_conf=None): + data = {} + data.update(BearerTokenGenerator.GRANT_TYPES_EXPIRES_IN) + if expires_in_conf: + data.update(expires_in_conf) + + def expires_in(client, grant_type): + return data.get(grant_type, BearerTokenGenerator.DEFAULT_EXPIRES_IN) + + return expires_in diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/endpoints.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/endpoints.py new file mode 100644 index 0000000..686675d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/endpoints.py @@ -0,0 +1,56 @@ +from authlib.oauth2.rfc7009 import RevocationEndpoint as _RevocationEndpoint + + +class RevocationEndpoint(_RevocationEndpoint): + """The revocation endpoint for OAuth authorization servers allows clients + to notify the authorization server that a previously obtained refresh or + access token is no longer needed. 
+ + Register it into the authorization server, and create a token endpoint response for token revocation:: + + from django.views.decorators.http import require_http_methods + + # register it into the authorization server instance + server.register_endpoint(RevocationEndpoint) + + @require_http_methods(["POST"]) + def revoke_token(request): + return server.create_endpoint_response( + RevocationEndpoint.ENDPOINT_NAME, + request + ) + """ + + def query_token(self, token, token_type_hint): + """Query the requested token from the database.""" + token_model = self.server.token_model + if token_type_hint == 'access_token': + rv = _query_access_token(token_model, token) + elif token_type_hint == 'refresh_token': + rv = _query_refresh_token(token_model, token) + else: + rv = _query_access_token(token_model, token) + if not rv: + rv = _query_refresh_token(token_model, token) + + return rv + + def revoke_token(self, token, request): + """Mark the given token as revoked.""" + token.revoked = True + token.save() + + +def _query_access_token(token_model, token): + try: + return token_model.objects.get(access_token=token) + except token_model.DoesNotExist: + return None + + +def _query_refresh_token(token_model, token): + try: + return token_model.objects.get(refresh_token=token) + except token_model.DoesNotExist: + return None diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/requests.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/requests.py new file mode 100644 index 0000000..e8c8a19 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/requests.py @@ -0,0 +1,46 @@ +from collections import defaultdict + +from django.http import HttpRequest +from django.utils.functional import cached_property +from authlib.common.encoding import json_loads +from authlib.oauth2.rfc6749 import OAuth2Request, JsonRequest + + +class DjangoOAuth2Request(OAuth2Request): + def __init__(self, request: HttpRequest): + super().__init__(request.method, request.build_absolute_uri(), None, request.headers) + self._request = request + + @property + def args(self): + return self._request.GET + + @property + def form(self): + return self._request.POST + + @cached_property + def data(self): + data = {} + data.update(self._request.GET.dict()) + data.update(self._request.POST.dict()) + return data + + @cached_property + def datalist(self): + values = defaultdict(list) + for k in self.args: + values[k].extend(self.args.getlist(k)) + for k in self.form: + values[k].extend(self.form.getlist(k)) + return values + + +class DjangoJsonRequest(JsonRequest): + def __init__(self, request: HttpRequest): + super().__init__(request.method, request.build_absolute_uri(), None, request.headers) + self._request = request + + @cached_property + def data(self): + return json_loads(self._request.body) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/resource_protector.py new file mode 100644 index 0000000..b89257b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/resource_protector.py @@ -0,0 +1,75 @@ +import functools +from django.http import JsonResponse +from authlib.oauth2 import ( + OAuth2Error, + ResourceProtector as _ResourceProtector, +) +from authlib.oauth2.rfc6749 import ( + MissingAuthorizationError, +) +from authlib.oauth2.rfc6750 import ( + BearerTokenValidator as _BearerTokenValidator +) +from
.requests import DjangoJsonRequest +from .signals import token_authenticated + + +class ResourceProtector(_ResourceProtector): + def acquire_token(self, request, scopes=None, **kwargs): + """A method to acquire the current valid token with the given scope. + + :param request: Django HTTP request instance + :param scopes: a list of scope values + :return: token object + """ + req = DjangoJsonRequest(request) + # backward compatibility + kwargs['scopes'] = scopes + for claim in kwargs: + if isinstance(kwargs[claim], str): + kwargs[claim] = [kwargs[claim]] + token = self.validate_request(request=req, **kwargs) + token_authenticated.send(sender=self.__class__, token=token) + return token + + def __call__(self, scopes=None, optional=False, **kwargs): + claims = kwargs + # backward compatibility + claims['scopes'] = scopes + def wrapper(f): + @functools.wraps(f) + def decorated(request, *args, **kwargs): + try: + token = self.acquire_token(request, **claims) + request.oauth_token = token + except MissingAuthorizationError as error: + if optional: + request.oauth_token = None + return f(request, *args, **kwargs) + return return_error_response(error) + except OAuth2Error as error: + return return_error_response(error) + return f(request, *args, **kwargs) + return decorated + return wrapper + + +class BearerTokenValidator(_BearerTokenValidator): + def __init__(self, token_model, realm=None, **extra_attributes): + self.token_model = token_model + super().__init__(realm, **extra_attributes) + + def authenticate_token(self, token_string): + try: + return self.token_model.objects.get(access_token=token_string) + except self.token_model.DoesNotExist: + return None + + +def return_error_response(error): + body = dict(error.get_body()) + resp = JsonResponse(body, status=error.status_code) + headers = error.get_headers() + for k, v in headers: + resp[k] = v + return resp diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/signals.py b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/signals.py new file mode 100644 index 0000000..0e9c265 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/django_oauth2/signals.py @@ -0,0 +1,11 @@ +from django.dispatch import Signal + + +#: signal when client is authenticated +client_authenticated = Signal() + +#: signal when token is revoked +token_revoked = Signal() + +#: signal when token is authenticated +token_authenticated = Signal() diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__init__.py new file mode 100644 index 0000000..ecdca2d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__init__.py @@ -0,0 +1,51 @@ +from werkzeug.local import LocalProxy +from .integration import FlaskIntegration, token_update +from .apps import FlaskOAuth1App, FlaskOAuth2App +from ..base_client import BaseOAuth, OAuthError + + +class OAuth(BaseOAuth): + oauth1_client_cls = FlaskOAuth1App + oauth2_client_cls = FlaskOAuth2App + framework_integration_cls = FlaskIntegration + + def __init__(self, app=None, cache=None, fetch_token=None, update_token=None): + super().__init__( + cache=cache, fetch_token=fetch_token, update_token=update_token) + self.app = app + if app: + self.init_app(app) + + def init_app(self, app, cache=None, fetch_token=None, update_token=None): + """Initialize lazily with a Flask app. This is usually used with the + Flask application factory pattern.
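# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the authlib source above): the
# application-factory pattern that init_app() supports. create_app() and the
# GITHUB_* config keys are assumptions; per FlaskIntegration.load_config()
# later in this diff, registered parameters are read from app.config as
# '<NAME>_<PARAM>' in upper case.
from flask import Flask
from authlib.integrations.flask_client import OAuth

oauth = OAuth()  # created at import time, without an app

def create_app():
    app = Flask(__name__)
    app.config['GITHUB_CLIENT_ID'] = '...'
    app.config['GITHUB_CLIENT_SECRET'] = '...'
    oauth.init_app(app)  # bind the app lazily
    return app
# ---------------------------------------------------------------------------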
+ """ + self.app = app + if cache is not None: + self.cache = cache + + if fetch_token: + self.fetch_token = fetch_token + if update_token: + self.update_token = update_token + + app.extensions = getattr(app, 'extensions', {}) + app.extensions['authlib.integrations.flask_client'] = self + + def create_client(self, name): + if not self.app: + raise RuntimeError('OAuth is not init with Flask app.') + return super().create_client(name) + + def register(self, name, overwrite=False, **kwargs): + self._registry[name] = (overwrite, kwargs) + if self.app: + return self.create_client(name) + return LocalProxy(lambda: self.create_client(name)) + + +__all__ = [ + 'OAuth', 'FlaskIntegration', + 'FlaskOAuth1App', 'FlaskOAuth2App', + 'token_update', 'OAuthError', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dbc2439 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/apps.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/apps.cpython-312.pyc new file mode 100644 index 0000000..e7e03e4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/apps.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/integration.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/integration.cpython-312.pyc new file mode 100644 index 0000000..8245e66 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/__pycache__/integration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/apps.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/apps.py new file mode 100644 index 0000000..84ac8c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/apps.py @@ -0,0 +1,107 @@ +from flask import g, redirect, request, session +from ..requests_client import OAuth1Session, OAuth2Session +from ..base_client import ( + BaseApp, OAuthError, + OAuth1Mixin, OAuth2Mixin, OpenIDMixin, +) + + +class FlaskAppMixin: + @property + def token(self): + attr = f'_oauth_token_{self.name}' + token = g.get(attr) + if token: + return token + if self._fetch_token: + token = self._fetch_token() + self.token = token + return token + + @token.setter + def token(self, token): + attr = f'_oauth_token_{self.name}' + setattr(g, attr, token) + + def _get_requested_token(self, *args, **kwargs): + return self.token + + def save_authorize_data(self, **kwargs): + state = kwargs.pop('state', None) + if state: + self.framework.set_state_data(session, state, kwargs) + else: + raise RuntimeError('Missing state value') + + def authorize_redirect(self, redirect_uri=None, **kwargs): + """Create a HTTP Redirect for Authorization Endpoint. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: A HTTP redirect response. 
+ """ + rv = self.create_authorization_url(redirect_uri, **kwargs) + self.save_authorize_data(redirect_uri=redirect_uri, **rv) + return redirect(rv['url']) + + +class FlaskOAuth1App(FlaskAppMixin, OAuth1Mixin, BaseApp): + client_cls = OAuth1Session + + def authorize_access_token(self, **kwargs): + """Fetch access token in one step. + + :return: A token dict. + """ + params = request.args.to_dict(flat=True) + state = params.get('oauth_token') + if not state: + raise OAuthError(description='Missing "oauth_token" parameter') + + data = self.framework.get_state_data(session, state) + if not data: + raise OAuthError(description='Missing "request_token" in temporary data') + + params['request_token'] = data['request_token'] + params.update(kwargs) + self.framework.clear_state_data(session, state) + token = self.fetch_access_token(**params) + self.token = token + return token + + +class FlaskOAuth2App(FlaskAppMixin, OAuth2Mixin, OpenIDMixin, BaseApp): + client_cls = OAuth2Session + + def authorize_access_token(self, **kwargs): + """Fetch access token in one step. + + :return: A token dict. + """ + if request.method == 'GET': + error = request.args.get('error') + if error: + description = request.args.get('error_description') + raise OAuthError(error=error, description=description) + + params = { + 'code': request.args.get('code'), + 'state': request.args.get('state'), + } + else: + params = { + 'code': request.form.get('code'), + 'state': request.form.get('state'), + } + + claims_options = kwargs.pop('claims_options', None) + state_data = self.framework.get_state_data(session, params.get('state')) + self.framework.clear_state_data(session, params.get('state')) + params = self._format_state_params(state_data, params) + token = self.fetch_access_token(**params, **kwargs) + self.token = token + + if 'id_token' in token and 'nonce' in state_data: + userinfo = self.parse_id_token(token, nonce=state_data['nonce'], claims_options=claims_options) + token['userinfo'] = userinfo + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/integration.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/integration.py new file mode 100644 index 0000000..f4ea57e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_client/integration.py @@ -0,0 +1,28 @@ +from flask import current_app +from flask.signals import Namespace +from ..base_client import FrameworkIntegration + +_signal = Namespace() +#: signal when token is updated +token_update = _signal.signal('token_update') + + +class FlaskIntegration(FrameworkIntegration): + def update_token(self, token, refresh_token=None, access_token=None): + token_update.send( + current_app, + name=self.name, + token=token, + refresh_token=refresh_token, + access_token=access_token, + ) + + @staticmethod + def load_config(oauth, name, params): + rv = {} + for k in params: + conf_key = f'{name}_{k}'.upper() + v = oauth.app.config.get(conf_key, None) + if v is not None: + rv[k] = v + return rv diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__init__.py new file mode 100644 index 0000000..780b059 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector, current_credential +from .cache import ( + 
register_nonce_hooks, + register_temporary_credential_hooks, + create_exists_nonce_func, +) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6a9d22b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/authorization_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..fdf5c7d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..8031560 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..55dfc98 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/authorization_server.py new file mode 100644 index 0000000..3a2a560 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/authorization_server.py @@ -0,0 +1,182 @@ +import logging +from werkzeug.utils import import_string +from flask import Response +from flask import request as flask_req +from authlib.oauth1 import ( + OAuth1Request, + AuthorizationServer as _AuthorizationServer, +) +from authlib.common.security import generate_token +from authlib.common.urls import url_encode + +log = logging.getLogger(__name__) + + +class AuthorizationServer(_AuthorizationServer): + """Flask implementation of :class:`authlib.rfc5849.AuthorizationServer`. + Initialize it with Flask app instance, client model class and cache:: + + server = AuthorizationServer(app=app, query_client=query_client) + # or initialize lazily + server = AuthorizationServer() + server.init_app(app, query_client=query_client) + + :param app: A Flask app instance + :param query_client: A function to get client by client_id. The client + model class MUST implement the methods described by + :class:`~authlib.oauth1.rfc5849.ClientMixin`. 
+ :param token_generator: A function to generate token + """ + + def __init__(self, app=None, query_client=None, token_generator=None): + self.app = app + self.query_client = query_client + self.token_generator = token_generator + + self._hooks = { + 'exists_nonce': None, + 'create_temporary_credential': None, + 'get_temporary_credential': None, + 'delete_temporary_credential': None, + 'create_authorization_verifier': None, + 'create_token_credential': None, + } + if app is not None: + self.init_app(app) + + def init_app(self, app, query_client=None, token_generator=None): + if query_client is not None: + self.query_client = query_client + if token_generator is not None: + self.token_generator = token_generator + + if self.token_generator is None: + self.token_generator = self.create_token_generator(app) + + methods = app.config.get('OAUTH1_SUPPORTED_SIGNATURE_METHODS') + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self.app = app + + def register_hook(self, name, func): + if name not in self._hooks: + raise ValueError('Invalid "name" of hook') + self._hooks[name] = func + + def create_token_generator(self, app): + token_generator = app.config.get('OAUTH1_TOKEN_GENERATOR') + + if isinstance(token_generator, str): + token_generator = import_string(token_generator) + else: + length = app.config.get('OAUTH1_TOKEN_LENGTH', 42) + + def token_generator(): + return generate_token(length) + + secret_generator = app.config.get('OAUTH1_TOKEN_SECRET_GENERATOR') + if isinstance(secret_generator, str): + secret_generator = import_string(secret_generator) + else: + length = app.config.get('OAUTH1_TOKEN_SECRET_LENGTH', 48) + + def secret_generator(): + return generate_token(length) + + def create_token(): + return { + 'oauth_token': token_generator(), + 'oauth_token_secret': secret_generator() + } + return create_token + + def get_client_by_id(self, client_id): + return self.query_client(client_id) + + def exists_nonce(self, nonce, request): + func = self._hooks['exists_nonce'] + if callable(func): + timestamp = request.timestamp + client_id = request.client_id + token = request.token + return func(nonce, timestamp, client_id, token) + + raise RuntimeError('"exists_nonce" hook is required.') + + def create_temporary_credential(self, request): + func = self._hooks['create_temporary_credential'] + if callable(func): + token = self.token_generator() + return func(token, request.client_id, request.redirect_uri) + raise RuntimeError( + '"create_temporary_credential" hook is required.' + ) + + def get_temporary_credential(self, request): + func = self._hooks['get_temporary_credential'] + if callable(func): + return func(request.token) + + raise RuntimeError( + '"get_temporary_credential" hook is required.' + ) + + def delete_temporary_credential(self, request): + func = self._hooks['delete_temporary_credential'] + if callable(func): + return func(request.token) + + raise RuntimeError( + '"delete_temporary_credential" hook is required.' + ) + + def create_authorization_verifier(self, request): + func = self._hooks['create_authorization_verifier'] + if callable(func): + verifier = generate_token(36) + func(request.credential, request.user, verifier) + return verifier + + raise RuntimeError( + '"create_authorization_verifier" hook is required.' 
+ ) + + def create_token_credential(self, request): + func = self._hooks['create_token_credential'] + if callable(func): + temporary_credential = request.credential + token = self.token_generator() + return func(token, temporary_credential) + + raise RuntimeError( + '"create_token_credential" hook is required.' + ) + + def check_authorization_request(self): + req = self.create_oauth1_request(None) + self.validate_authorization_request(req) + return req + + def create_authorization_response(self, request=None, grant_user=None): + return super()\ + .create_authorization_response(request, grant_user) + + def create_token_response(self, request=None): + return super().create_token_response(request) + + def create_oauth1_request(self, request): + if request is None: + request = flask_req + if request.method in ('POST', 'PUT'): + body = request.form.to_dict(flat=True) + else: + body = None + return OAuth1Request(request.method, request.url, body, request.headers) + + def handle_response(self, status_code, payload, headers): + return Response( + url_encode(payload), + status=status_code, + headers=headers + ) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/cache.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/cache.py new file mode 100644 index 0000000..fdfc9a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/cache.py @@ -0,0 +1,80 @@ +from authlib.oauth1 import TemporaryCredential + + +def register_temporary_credential_hooks( + authorization_server, cache, key_prefix='temporary_credential:'): + """Register temporary credential related hooks to authorization server. + + :param authorization_server: AuthorizationServer instance + :param cache: Cache instance + :param key_prefix: key prefix for temporary credential + """ + + def create_temporary_credential(token, client_id, redirect_uri): + key = key_prefix + token['oauth_token'] + token['client_id'] = client_id + if redirect_uri: + token['oauth_callback'] = redirect_uri + + cache.set(key, token, timeout=86400) # cache for one day + return TemporaryCredential(token) + + def get_temporary_credential(oauth_token): + if not oauth_token: + return None + key = key_prefix + oauth_token + value = cache.get(key) + if value: + return TemporaryCredential(value) + + def delete_temporary_credential(oauth_token): + if oauth_token: + key = key_prefix + oauth_token + cache.delete(key) + + def create_authorization_verifier(credential, grant_user, verifier): + key = key_prefix + credential.get_oauth_token() + credential['oauth_verifier'] = verifier + credential['user_id'] = grant_user.get_user_id() + cache.set(key, credential, timeout=86400) + return credential + + authorization_server.register_hook( + 'create_temporary_credential', create_temporary_credential) + authorization_server.register_hook( + 'get_temporary_credential', get_temporary_credential) + authorization_server.register_hook( + 'delete_temporary_credential', delete_temporary_credential) + authorization_server.register_hook( + 'create_authorization_verifier', create_authorization_verifier) + + +def create_exists_nonce_func(cache, key_prefix='nonce:', expires=86400): + """Create an ``exists_nonce`` function that can be used in hooks and + resource protector. 
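# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the authlib source above): wiring these
# cache-backed hooks into the OAuth 1 authorization server defined earlier
# in this diff. `app`, `query_client`, and `cache` (any object with
# get/set/delete/has, e.g. a flask-caching instance) are assumptions.
from authlib.integrations.flask_oauth1 import (
    AuthorizationServer,
    register_nonce_hooks,
    register_temporary_credential_hooks,
)

server = AuthorizationServer(app, query_client=query_client)
register_nonce_hooks(server, cache)                  # installs 'exists_nonce'
register_temporary_credential_hooks(server, cache)   # installs the four
                                                     # credential hooks above
# a 'create_token_credential' hook that persists issued tokens must still
# be registered by the application via server.register_hook(...).
# ---------------------------------------------------------------------------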
+ + :param cache: Cache instance + :param key_prefix: key prefix for nonce keys + :param expires: Expiry time for the nonce + """ + def exists_nonce(nonce, timestamp, client_id, oauth_token): + key = f'{key_prefix}{nonce}-{timestamp}-{client_id}' + if oauth_token: + key = f'{key}-{oauth_token}' + rv = cache.has(key) + cache.set(key, 1, timeout=expires) + return rv + return exists_nonce + + +def register_nonce_hooks( + authorization_server, cache, key_prefix='nonce:', expires=86400): + """Register nonce-related hooks to the authorization server. + + :param authorization_server: AuthorizationServer instance + :param cache: Cache instance + :param key_prefix: key prefix for nonce keys + :param expires: Expiry time for the nonce + """ + exists_nonce = create_exists_nonce_func(cache, key_prefix, expires) + authorization_server.register_hook('exists_nonce', exists_nonce) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/resource_protector.py new file mode 100644 index 0000000..c941eb4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth1/resource_protector.py @@ -0,0 +1,113 @@ +import functools +from flask import g, json, Response +from flask import request as _req +from werkzeug.local import LocalProxy +from authlib.consts import default_json_headers +from authlib.oauth1 import ResourceProtector as _ResourceProtector +from authlib.oauth1.errors import OAuth1Error + + +class ResourceProtector(_ResourceProtector): + """A protecting method for resource servers. Initialize a resource + protector with these methods: + + 1. query_client + 2. query_token + 3. exists_nonce + + Usually, a ``query_client`` method would look like (if using SQLAlchemy):: + + def query_client(client_id): + return Client.query.filter_by(client_id=client_id).first() + + A ``query_token`` method accepts two parameters, ``client_id`` and ``oauth_token``:: + + def query_token(client_id, oauth_token): + return Token.query.filter_by(client_id=client_id, oauth_token=oauth_token).first() + + And for ``exists_nonce``, if using a cache, we have a built-in hook to create this method:: + + from authlib.integrations.flask_oauth1 import create_exists_nonce_func + + exists_nonce = create_exists_nonce_func(cache) + + Then initialize the resource protector with those methods:: + + require_oauth = ResourceProtector( + app, query_client=query_client, + query_token=query_token, exists_nonce=exists_nonce, + ) + """ + def __init__(self, app=None, query_client=None, + query_token=None, exists_nonce=None): + self.query_client = query_client + self.query_token = query_token + self._exists_nonce = exists_nonce + + self.app = app + if app: + self.init_app(app) + + def init_app(self, app, query_client=None, query_token=None, + exists_nonce=None): + if query_client is not None: + self.query_client = query_client + if query_token is not None: + self.query_token = query_token + if exists_nonce is not None: + self._exists_nonce = exists_nonce + + methods = app.config.get('OAUTH1_SUPPORTED_SIGNATURE_METHODS') + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self.app = app + + def get_client_by_id(self, client_id): + return self.query_client(client_id) + + def get_token_credential(self, request): + return self.query_token(request.client_id, request.token) + + def exists_nonce(self, nonce, request): + if not self._exists_nonce: + raise
RuntimeError('"exists_nonce" function is required.') + + timestamp = request.timestamp + client_id = request.client_id + token = request.token + return self._exists_nonce(nonce, timestamp, client_id, token) + + def acquire_credential(self): + req = self.validate_request( + _req.method, + _req.url, + _req.form.to_dict(flat=True), + _req.headers + ) + g.authlib_server_oauth1_credential = req.credential + return req.credential + + def __call__(self, scope=None): + def wrapper(f): + @functools.wraps(f) + def decorated(*args, **kwargs): + try: + self.acquire_credential() + except OAuth1Error as error: + body = dict(error.get_body()) + return Response( + json.dumps(body), + status=error.status_code, + headers=default_json_headers, + ) + return f(*args, **kwargs) + return decorated + return wrapper + + +def _get_current_credential(): + return g.get('authlib_server_oauth1_credential') + + +current_credential = LocalProxy(_get_current_credential) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__init__.py new file mode 100644 index 0000000..170a719 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__init__.py @@ -0,0 +1,12 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ( + ResourceProtector, + current_token, +) +from .signals import ( + client_authenticated, + token_authenticated, + token_revoked, +) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..24985db Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/authorization_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..58cb499 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..27922ba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/requests.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/requests.cpython-312.pyc new file mode 100644 index 0000000..8e945e4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/requests.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..47f9cec Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/signals.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/signals.cpython-312.pyc new file mode 100644 index 0000000..b0f7574 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/__pycache__/signals.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/authorization_server.py new file mode 100644 index 0000000..14510b2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/authorization_server.py @@ -0,0 +1,159 @@ +from werkzeug.utils import import_string +from flask import Response, json +from flask import request as flask_req +from authlib.oauth2 import ( + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth2.rfc6750 import BearerTokenGenerator +from authlib.common.security import generate_token +from .requests import FlaskOAuth2Request, FlaskJsonRequest +from .signals import client_authenticated, token_revoked + + +class AuthorizationServer(_AuthorizationServer): + """Flask implementation of :class:`authlib.oauth2.rfc6749.AuthorizationServer`. + Initialize it with ``query_client``, ``save_token`` methods and Flask + app instance:: + + def query_client(client_id): + return Client.query.filter_by(client_id=client_id).first() + + def save_token(token, request): + if request.user: + user_id = request.user.id + else: + user_id = None + client = request.client + tok = Token( + client_id=client.client_id, + user_id=user.id, + **token + ) + db.session.add(tok) + db.session.commit() + + server = AuthorizationServer(app, query_client, save_token) + # or initialize lazily + server = AuthorizationServer() + server.init_app(app, query_client, save_token) + """ + + def __init__(self, app=None, query_client=None, save_token=None): + super().__init__() + self._query_client = query_client + self._save_token = save_token + self._error_uris = None + if app is not None: + self.init_app(app) + + def init_app(self, app, query_client=None, save_token=None): + """Initialize later with Flask app instance.""" + if query_client is not None: + self._query_client = query_client + if save_token is not None: + self._save_token = save_token + + self.register_token_generator('default', self.create_bearer_token_generator(app.config)) + self.scopes_supported = app.config.get('OAUTH2_SCOPES_SUPPORTED') + self._error_uris = app.config.get('OAUTH2_ERROR_URIS') + + def query_client(self, client_id): + return self._query_client(client_id) + + def save_token(self, token, request): + return self._save_token(token, request) + + def get_error_uri(self, request, error): + if self._error_uris: + uris = dict(self._error_uris) + return uris.get(error.error) + + def create_oauth2_request(self, request): + return FlaskOAuth2Request(flask_req) + + def create_json_request(self, request): + return FlaskJsonRequest(flask_req) + + def handle_response(self, status_code, payload, headers): + if isinstance(payload, dict): + payload = json.dumps(payload) + return Response(payload, status=status_code, headers=headers) + + def send_signal(self, name, *args, **kwargs): + if name == 'after_authenticate_client': + client_authenticated.send(self, 
*args, **kwargs) + elif name == 'after_revoke_token': + token_revoked.send(self, *args, **kwargs) + + def create_bearer_token_generator(self, config): + """Create a generator function for generating ``token`` value. This + method will create a Bearer Token generator with + :class:`authlib.oauth2.rfc6750.BearerToken`. + + Configurable settings: + + 1. OAUTH2_ACCESS_TOKEN_GENERATOR: Boolean or import string, default is True. + 2. OAUTH2_REFRESH_TOKEN_GENERATOR: Boolean or import string, default is False. + 3. OAUTH2_TOKEN_EXPIRES_IN: Dict or import string, default is None. + + By default, it will not generate ``refresh_token``, which can be turn on by + configure ``OAUTH2_REFRESH_TOKEN_GENERATOR``. + + Here are some examples of the token generator:: + + OAUTH2_ACCESS_TOKEN_GENERATOR = 'your_project.generators.gen_token' + + # and in module `your_project.generators`, you can define: + + def gen_token(client, grant_type, user, scope): + # generate token according to these parameters + token = create_random_token() + return f'{client.id}-{user.id}-{token}' + + Here is an example of ``OAUTH2_TOKEN_EXPIRES_IN``:: + + OAUTH2_TOKEN_EXPIRES_IN = { + 'authorization_code': 864000, + 'urn:ietf:params:oauth:grant-type:jwt-bearer': 3600, + } + """ + conf = config.get('OAUTH2_ACCESS_TOKEN_GENERATOR', True) + access_token_generator = create_token_generator(conf, 42) + + conf = config.get('OAUTH2_REFRESH_TOKEN_GENERATOR', False) + refresh_token_generator = create_token_generator(conf, 48) + + expires_conf = config.get('OAUTH2_TOKEN_EXPIRES_IN') + expires_generator = create_token_expires_in_generator(expires_conf) + return BearerTokenGenerator( + access_token_generator, + refresh_token_generator, + expires_generator + ) + + +def create_token_expires_in_generator(expires_in_conf=None): + if isinstance(expires_in_conf, str): + return import_string(expires_in_conf) + + data = {} + data.update(BearerTokenGenerator.GRANT_TYPES_EXPIRES_IN) + if isinstance(expires_in_conf, dict): + data.update(expires_in_conf) + + def expires_in(client, grant_type): + return data.get(grant_type, BearerTokenGenerator.DEFAULT_EXPIRES_IN) + + return expires_in + + +def create_token_generator(token_generator_conf, length=42): + if callable(token_generator_conf): + return token_generator_conf + + if isinstance(token_generator_conf, str): + return import_string(token_generator_conf) + elif token_generator_conf is True: + def token_generator(*args, **kwargs): + return generate_token(length) + return token_generator diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/errors.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/errors.py new file mode 100644 index 0000000..a771a1c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/errors.py @@ -0,0 +1,39 @@ +import importlib.metadata + +import werkzeug +from werkzeug.exceptions import HTTPException + +_version = importlib.metadata.version('werkzeug').split('.')[0] + +if _version in ('0', '1'): + class _HTTPException(HTTPException): + def __init__(self, code, body, headers, response=None): + super().__init__(None, response) + self.code = code + + self.body = body + self.headers = headers + + def get_body(self, environ=None): + return self.body + + def get_headers(self, environ=None): + return self.headers +else: + class _HTTPException(HTTPException): + def __init__(self, code, body, headers, response=None): + super().__init__(None, response) + self.code = code + + self.body = body + self.headers = headers 
= headers
+ + def get_body(self, environ=None, scope=None): + return self.body + + def get_headers(self, environ=None, scope=None): + return self.headers + + +def raise_http_exception(status, body, headers): + raise _HTTPException(status, body, headers) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/requests.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/requests.py new file mode 100644 index 0000000..255c9ee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/requests.py @@ -0,0 +1,40 @@ +from collections import defaultdict +from functools import cached_property + +from flask.wrappers import Request +from authlib.oauth2.rfc6749 import OAuth2Request, JsonRequest + + +class FlaskOAuth2Request(OAuth2Request): + def __init__(self, request: Request): + super().__init__(request.method, request.url, None, request.headers) + self._request = request + + @property + def args(self): + return self._request.args + + @property + def form(self): + return self._request.form + + @property + def data(self): + return self._request.values + + @cached_property + def datalist(self): + values = defaultdict(list) + for k in self.data: + values[k].extend(self.data.getlist(k)) + return values + + +class FlaskJsonRequest(JsonRequest): + def __init__(self, request: Request): + super().__init__(request.method, request.url, None, request.headers) + self._request = request + + @property + def data(self): + return self._request.get_json() diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/resource_protector.py new file mode 100644 index 0000000..be2b3fa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/resource_protector.py @@ -0,0 +1,114 @@ +import functools +from contextlib import contextmanager +from flask import g, json +from flask import request as _req +from werkzeug.local import LocalProxy +from authlib.oauth2 import ( + OAuth2Error, + ResourceProtector as _ResourceProtector +) +from authlib.oauth2.rfc6749 import ( + MissingAuthorizationError, +) +from .requests import FlaskJsonRequest +from .signals import token_authenticated +from .errors import raise_http_exception + + +class ResourceProtector(_ResourceProtector): + """A protecting interface for resource servers. Create a ``require_oauth`` + decorator easily with ResourceProtector:: + + from authlib.integrations.flask_oauth2 import ResourceProtector + + require_oauth = ResourceProtector() + + # add bearer token validator + from authlib.oauth2.rfc6750 import BearerTokenValidator + from project.models import Token + + class MyBearerTokenValidator(BearerTokenValidator): + def authenticate_token(self, token_string): + return Token.query.filter_by(access_token=token_string).first() + + require_oauth.register_token_validator(MyBearerTokenValidator()) + + # protect resource with require_oauth + + @app.route('/user') + @require_oauth(['profile']) + def user_profile(): + user = User.get(current_token.user_id) + return jsonify(user.to_dict()) + + """ + def raise_error_response(self, error): + """Raise HTTPException for OAuth2Error. Developers can re-implement + this method to customize the error response.
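+
+        For example, a subclass could emit a custom JSON envelope (an
+        illustrative sketch, not part of the library)::
+
+            class MyResourceProtector(ResourceProtector):
+                def raise_error_response(self, error):
+                    body = json.dumps({'error': dict(error.get_body())})
+                    raise_http_exception(error.status_code, body, error.get_headers())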
+ + :param error: OAuth2Error + :raise: HTTPException + """ + status = error.status_code + body = json.dumps(dict(error.get_body())) + headers = error.get_headers() + raise_http_exception(status, body, headers) + + def acquire_token(self, scopes=None, **kwargs): + """Acquire the current valid token with the given scope. + + :param scopes: a list of scope values + :return: token object + """ + request = FlaskJsonRequest(_req) + # backward compatibility + kwargs['scopes'] = scopes + for claim in kwargs: + if isinstance(kwargs[claim], str): + kwargs[claim] = [kwargs[claim]] + token = self.validate_request(request=request, **kwargs) + token_authenticated.send(self, token=token) + g.authlib_server_oauth2_token = token + return token + + @contextmanager + def acquire(self, scopes=None): + """The ``with`` statement form of ``require_oauth``. Instead of using a + decorator, you can use a ``with`` statement:: + + @app.route('/api/user') + def user_api(): + with require_oauth.acquire('profile') as token: + user = User.get(token.user_id) + return jsonify(user.to_dict()) + """ + try: + yield self.acquire_token(scopes) + except OAuth2Error as error: + self.raise_error_response(error) + + def __call__(self, scopes=None, optional=False, **kwargs): + claims = kwargs + # backward compatibility + claims['scopes'] = scopes + def wrapper(f): + @functools.wraps(f) + def decorated(*args, **kwargs): + try: + self.acquire_token(**claims) + except MissingAuthorizationError as error: + if optional: + return f(*args, **kwargs) + self.raise_error_response(error) + except OAuth2Error as error: + self.raise_error_response(error) + return f(*args, **kwargs) + return decorated + return wrapper + + +def _get_current_token(): + return g.get('authlib_server_oauth2_token') + + +current_token = LocalProxy(_get_current_token) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/signals.py b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/signals.py new file mode 100644 index 0000000..c61e011 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/flask_oauth2/signals.py @@ -0,0 +1,12 @@ +from flask.signals import Namespace + +_signal = Namespace() + +#: signal when client is authenticated +client_authenticated = _signal.signal('client_authenticated') + +#: signal when token is revoked +token_revoked = _signal.signal('token_revoked') + +#: signal when token is authenticated +token_authenticated = _signal.signal('token_authenticated') diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__init__.py new file mode 100644 index 0000000..3b5437c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__init__.py @@ -0,0 +1,25 @@ +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) +from .oauth1_client import OAuth1Auth, AsyncOAuth1Client, OAuth1Client +from .oauth2_client import ( + OAuth2Auth, OAuth2Client, OAuth2ClientAuth, + AsyncOAuth2Client, +) +from .assertion_client import AssertionClient, AsyncAssertionClient +from ..base_client import OAuthError + + +__all__ = [ + 'OAuthError', + 'OAuth1Auth', 'AsyncOAuth1Client', + 'SIGNATURE_HMAC_SHA1', 'SIGNATURE_RSA_SHA1', 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', 'SIGNATURE_TYPE_QUERY', 'SIGNATURE_TYPE_BODY', + 'OAuth2Auth', 'OAuth2ClientAuth',
'OAuth2Client', 'AsyncOAuth2Client', + 'AssertionClient', 'AsyncAssertionClient', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ad152f4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/assertion_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/assertion_client.cpython-312.pyc new file mode 100644 index 0000000..066f7b3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/assertion_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth1_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth1_client.cpython-312.pyc new file mode 100644 index 0000000..1e049af Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth1_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth2_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth2_client.cpython-312.pyc new file mode 100644 index 0000000..571cdd5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/oauth2_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..708fec7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/assertion_client.py b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/assertion_client.py new file mode 100644 index 0000000..83dc58b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/assertion_client.py @@ -0,0 +1,81 @@ +import httpx +from httpx import Response, USE_CLIENT_DEFAULT +from authlib.oauth2.rfc7521 import AssertionClient as _AssertionClient +from authlib.oauth2.rfc7523 import JWTBearerGrant +from .utils import extract_client_kwargs +from .oauth2_client import OAuth2Auth +from ..base_client import OAuthError + +__all__ = ['AsyncAssertionClient'] + + +class AsyncAssertionClient(_AssertionClient, httpx.AsyncClient): + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, **kwargs): + + client_kwargs = extract_client_kwargs(kwargs) + httpx.AsyncClient.__init__(self, **client_kwargs) + + _AssertionClient.__init__( + self, session=None, + token_endpoint=token_endpoint, 
issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, **kwargs + ) + + async def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs) -> Response: + """Send request with auto refresh token feature.""" + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token or self.token.is_expired(): + await self.refresh_token() + + auth = self.token_auth + return await super().request( + method, url, auth=auth, **kwargs) + + async def _refresh_token(self, data): + resp = await self.request( + 'POST', self.token_endpoint, data=data, withhold_token=True) + + return self.parse_response_token(resp) + + +class AssertionClient(_AssertionClient, httpx.Client): + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, **kwargs): + + client_kwargs = extract_client_kwargs(kwargs) + httpx.Client.__init__(self, **client_kwargs) + + _AssertionClient.__init__( + self, session=self, + token_endpoint=token_endpoint, issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, **kwargs + ) + + def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + """Send request with auto refresh token feature.""" + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token or self.token.is_expired(): + self.refresh_token() + + auth = self.token_auth + return super().request( + method, url, auth=auth, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth1_client.py b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth1_client.py new file mode 100644 index 0000000..ce031c9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth1_client.py @@ -0,0 +1,102 @@ +import typing +import httpx +from httpx import Auth, Request, Response +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, +) +from authlib.common.encoding import to_unicode +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client as _OAuth1Client +from .utils import build_request, extract_client_kwargs +from ..base_client import OAuthError + + +class OAuth1Auth(Auth, ClientAuth): + """Signs the httpx request using OAuth 1 (RFC5849)""" + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + url, headers, body = self.prepare( + request.method, str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield build_request(url=url, headers=headers, body=body, initial_request=request) + + +class AsyncOAuth1Client(_OAuth1Client, httpx.AsyncClient): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + + _client_kwargs = extract_client_kwargs(kwargs) + httpx.AsyncClient.__init__(self, **_client_kwargs) + + _OAuth1Client.__init__( + 
self, None, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + async def fetch_access_token(self, url, verifier=None, **kwargs): + """Method for fetching an access token from the token endpoint. + + This is the final step in the OAuth 1 workflow. An access token is + obtained using all previously obtained credentials, including the + verifier from the authorization step. + + :param url: Access Token endpoint. + :param verifier: A verifier string to prove authorization was granted. + :param kwargs: Extra parameters to include for fetching access token. + :return: A token dict. + """ + if verifier: + self.auth.verifier = verifier + if not self.auth.verifier: + self.handle_error('missing_verifier', 'Missing "verifier" value') + token = await self._fetch_token(url, **kwargs) + self.auth.verifier = None + return token + + async def _fetch_token(self, url, **kwargs): + resp = await self.post(url, **kwargs) + text = await resp.aread() + token = self.parse_response_token(resp.status_code, to_unicode(text)) + self.token = token + return token + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) + + +class OAuth1Client(_OAuth1Client, httpx.Client): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + + _client_kwargs = extract_client_kwargs(kwargs) + httpx.Client.__init__(self, **_client_kwargs) + + _OAuth1Client.__init__( + self, self, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth2_client.py b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth2_client.py new file mode 100644 index 0000000..5b2d3fd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/oauth2_client.py @@ -0,0 +1,220 @@ +import typing +from contextlib import asynccontextmanager + +import httpx +from httpx import Auth, Request, Response, USE_CLIENT_DEFAULT +from anyio import Lock # Import after httpx so import errors refer to httpx +from authlib.common.urls import url_decode +from authlib.oauth2.client import OAuth2Client as _OAuth2Client +from authlib.oauth2.auth import ClientAuth, TokenAuth +from .utils import HTTPX_CLIENT_KWARGS, build_request +from ..base_client import ( + OAuthError, + InvalidTokenError, + MissingTokenError, + UnsupportedTokenTypeError, +) + +__all__ = [ + 'OAuth2Auth', 'OAuth2ClientAuth', + 'AsyncOAuth2Client', 'OAuth2Client', +] + + +class OAuth2Auth(Auth, TokenAuth): + """Sign requests for OAuth 2.0; currently only bearer tokens are supported.""" + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + try: + url, headers, body = self.prepare(
+ str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield build_request(url=url, headers=headers, body=body, initial_request=request) + except KeyError as error: + description = f'Unsupported token_type: {str(error)}' + raise UnsupportedTokenTypeError(description=description) + + +class OAuth2ClientAuth(Auth, ClientAuth): + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + url, headers, body = self.prepare( + request.method, str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield build_request(url=url, headers=headers, body=body, initial_request=request) + + +class AsyncOAuth2Client(_OAuth2Client, httpx.AsyncClient): + SESSION_REQUEST_PARAMS = HTTPX_CLIENT_KWARGS + + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, leeway=60, **kwargs): + + # extract httpx.Client kwargs + client_kwargs = self._extract_session_request_params(kwargs) + httpx.AsyncClient.__init__(self, **client_kwargs) + + # We use a Lock to synchronize coroutines to prevent + # multiple concurrent attempts to refresh the same token + self._token_refresh_lock = Lock() + + _OAuth2Client.__init__( + self, session=None, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, leeway=leeway, **kwargs + ) + + async def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + await self.ensure_active_token(self.token) + + auth = self.token_auth + + return await super().request( + method, url, auth=auth, **kwargs) + + @asynccontextmanager + async def stream(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + await self.ensure_active_token(self.token) + + auth = self.token_auth + + async with super().stream( + method, url, auth=auth, **kwargs) as resp: + yield resp + + async def ensure_active_token(self, token): + async with self._token_refresh_lock: + if self.token.is_expired(leeway=self.leeway): + refresh_token = token.get('refresh_token') + url = self.metadata.get('token_endpoint') + if refresh_token and url: + await self.refresh_token(url, refresh_token=refresh_token) + elif self.metadata.get('grant_type') == 'client_credentials': + access_token = token['access_token'] + new_token = await self.fetch_token(url, grant_type='client_credentials') + if self.update_token: + await self.update_token(new_token, access_token=access_token) + else: + raise InvalidTokenError() + + async def _fetch_token(self, url, body='', headers=None, auth=USE_CLIENT_DEFAULT, + method='POST', **kwargs): + if method.upper() == 'POST': + resp = await self.post( + url, data=dict(url_decode(body)), headers=headers, + auth=auth, **kwargs) + else: + if '?' 
in url: + url = '&'.join([url, body]) + else: + url = '?'.join([url, body]) + resp = await self.get(url, headers=headers, auth=auth, **kwargs) + + for hook in self.compliance_hook['access_token_response']: + resp = hook(resp) + + return self.parse_response_token(resp) + + async def _refresh_token(self, url, refresh_token=None, body='', + headers=None, auth=USE_CLIENT_DEFAULT, **kwargs): + resp = await self.post( + url, data=dict(url_decode(body)), headers=headers, + auth=auth, **kwargs) + + for hook in self.compliance_hook['refresh_token_response']: + resp = hook(resp) + + token = self.parse_response_token(resp) + if 'refresh_token' not in token: + self.token['refresh_token'] = refresh_token + + if self.update_token: + await self.update_token(self.token, refresh_token=refresh_token) + + return self.token + + def _http_post(self, url, body=None, auth=USE_CLIENT_DEFAULT, headers=None, **kwargs): + return self.post( + url, data=dict(url_decode(body)), + headers=headers, auth=auth, **kwargs) + + +class OAuth2Client(_OAuth2Client, httpx.Client): + SESSION_REQUEST_PARAMS = HTTPX_CLIENT_KWARGS + + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, **kwargs): + + # extract httpx.Client kwargs + client_kwargs = self._extract_session_request_params(kwargs) + httpx.Client.__init__(self, **client_kwargs) + + _OAuth2Client.__init__( + self, session=self, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, **kwargs + ) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) + + def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + if not self.ensure_active_token(self.token): + raise InvalidTokenError() + + auth = self.token_auth + + return super().request( + method, url, auth=auth, **kwargs) + + def stream(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + if not self.ensure_active_token(self.token): + raise InvalidTokenError() + + auth = self.token_auth + + return super().stream( + method, url, auth=auth, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/utils.py b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/utils.py new file mode 100644 index 0000000..8f19f37 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/httpx_client/utils.py @@ -0,0 +1,30 @@ +from httpx import Request + +HTTPX_CLIENT_KWARGS = [ + 'headers', 'cookies', 'verify', 'cert', 'http1', 'http2', + 'proxies', 'timeout', 'follow_redirects', 'limits', 'max_redirects', + 'event_hooks', 'base_url', 'transport', 'app', 'trust_env', +] + + +def extract_client_kwargs(kwargs): + client_kwargs = {} + for k in HTTPX_CLIENT_KWARGS: + if k in kwargs: + client_kwargs[k] = kwargs.pop(k) + return client_kwargs + + 
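+# An illustrative aside (not part of the module): extract_client_kwargs pops
+# the httpx-specific options out of a mixed kwargs dict, leaving the rest for
+# the OAuth client constructors. For example:
+#
+#     kwargs = {'timeout': 5.0, 'scope': 'profile'}
+#     extract_client_kwargs(kwargs)   # returns {'timeout': 5.0}
+#     # kwargs is now {'scope': 'profile'}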
+def build_request(url, headers, body, initial_request: Request) -> Request: + """Make sure that all the data from initial request is passed to the updated object""" + updated_request = Request( + method=initial_request.method, + url=url, + headers=headers, + content=body + ) + + if hasattr(initial_request, 'extensions'): + updated_request.extensions = initial_request.extensions + + return updated_request diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__init__.py new file mode 100644 index 0000000..fcbdec3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__init__.py @@ -0,0 +1,22 @@ +from .oauth1_session import OAuth1Session, OAuth1Auth +from .oauth2_session import OAuth2Session, OAuth2Auth +from .assertion_session import AssertionSession +from ..base_client import OAuthError +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) + + +__all__ = [ + 'OAuthError', + 'OAuth1Session', 'OAuth1Auth', + 'SIGNATURE_HMAC_SHA1', 'SIGNATURE_RSA_SHA1', 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', 'SIGNATURE_TYPE_QUERY', 'SIGNATURE_TYPE_BODY', + 'OAuth2Session', 'OAuth2Auth', + 'AssertionSession', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6d2afb0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/assertion_session.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/assertion_session.cpython-312.pyc new file mode 100644 index 0000000..c14bac6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/assertion_session.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth1_session.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth1_session.cpython-312.pyc new file mode 100644 index 0000000..b2d1fc7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth1_session.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth2_session.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth2_session.cpython-312.pyc new file mode 100644 index 0000000..562524b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/oauth2_session.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..9edab4b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/__pycache__/utils.cpython-312.pyc 
differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/assertion_session.py b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/assertion_session.py new file mode 100644 index 0000000..de41dce --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/assertion_session.py @@ -0,0 +1,47 @@ +from requests import Session +from authlib.oauth2.rfc7521 import AssertionClient +from authlib.oauth2.rfc7523 import JWTBearerGrant +from .oauth2_session import OAuth2Auth +from .utils import update_session_configure + + +class AssertionAuth(OAuth2Auth): + def ensure_active_token(self): + if self.client and (not self.token or self.token.is_expired(self.client.leeway)): + return self.client.refresh_token() + + +class AssertionSession(AssertionClient, Session): + """Constructs a new Assertion Framework for OAuth 2.0 Authorization Grants + per RFC7521_. + + .. _RFC7521: https://tools.ietf.org/html/rfc7521 + """ + token_auth_class = AssertionAuth + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, default_timeout=None, + leeway=60, **kwargs): + Session.__init__(self) + self.default_timeout = default_timeout + update_session_configure(self, kwargs) + AssertionClient.__init__( + self, session=self, + token_endpoint=token_endpoint, issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, leeway=leeway, **kwargs + ) + + def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature.""" + if self.default_timeout: + kwargs.setdefault('timeout', self.default_timeout) + if not withhold_token and auth is None: + auth = self.token_auth + return super().request( + method, url, auth=auth, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth1_session.py b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth1_session.py new file mode 100644 index 0000000..8c49fa9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth1_session.py @@ -0,0 +1,59 @@ +from requests import Session +from requests.auth import AuthBase +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, +) +from authlib.common.encoding import to_native +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client +from ..base_client import OAuthError +from .utils import update_session_configure + + +class OAuth1Auth(AuthBase, ClientAuth): + """Signs the request using OAuth 1 (RFC5849)""" + + def __call__(self, req): + url, headers, body = self.prepare( + req.method, req.url, req.headers, req.body) + + req.url = to_native(url) + req.prepare_headers(headers) + if body: + req.body = body + return req + + +class OAuth1Session(OAuth1Client, Session): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + Session.__init__(self) + update_session_configure(self, kwargs) + OAuth1Client.__init__( + 
self, session=self, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + def rebuild_auth(self, prepared_request, response): + """When being redirected, we should always strip the Authorization + header, since a nonce may not be reused, as per the OAuth spec. + """ + if 'Authorization' in prepared_request.headers: + # If we get redirected to a new host, we should strip out + # any authentication headers. + prepared_request.headers.pop('Authorization', True) + prepared_request.prepare_auth(self.auth) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth2_session.py b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth2_session.py new file mode 100644 index 0000000..9358656 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/oauth2_session.py @@ -0,0 +1,113 @@ +from requests import Session +from requests.auth import AuthBase +from authlib.oauth2.client import OAuth2Client +from authlib.oauth2.auth import ClientAuth, TokenAuth +from ..base_client import ( + OAuthError, + InvalidTokenError, + MissingTokenError, + UnsupportedTokenTypeError, +) +from .utils import update_session_configure + +__all__ = ['OAuth2Session', 'OAuth2Auth'] + + +class OAuth2Auth(AuthBase, TokenAuth): + """Sign requests for OAuth 2.0; currently only bearer tokens are supported.""" + + def ensure_active_token(self): + if self.client and not self.client.ensure_active_token(self.token): + raise InvalidTokenError() + + def __call__(self, req): + self.ensure_active_token() + try: + req.url, req.headers, req.body = self.prepare( + req.url, req.headers, req.body) + except KeyError as error: + description = f'Unsupported token_type: {str(error)}' + raise UnsupportedTokenTypeError(description=description) + return req + + +class OAuth2ClientAuth(AuthBase, ClientAuth): + """Attaches OAuth Client Authentication to the given Request object. + """ + def __call__(self, req): + req.url, req.headers, req.body = self.prepare( + req.method, req.url, req.headers, req.body + ) + return req + + +class OAuth2Session(OAuth2Client, Session): + """Construct a new OAuth 2 client requests session. + + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param authorization_endpoint: URL of the authorization server's + authorization endpoint. + :param token_endpoint: URL of the authorization server's token endpoint. + :param token_endpoint_auth_method: client authentication method for + token endpoint. + :param revocation_endpoint: URL of the authorization server's OAuth 2.0 + revocation endpoint. + :param revocation_endpoint_auth_method: client authentication method for + revocation endpoint. + :param scope: Scope that you need to access user resources. + :param state: Shared secret to prevent CSRF attack. + :param redirect_uri: Redirect URI you registered as callback. + :param token: A dict of token attributes such as ``access_token``, + ``token_type`` and ``expires_at``. + :param token_placement: The place to put token in HTTP request. Available + values: "header", "body", "uri".
+ :param update_token: A function for you to update token. It accepts an + :class:`OAuth2Token` as a parameter. + :param leeway: Time window in seconds before the actual expiration of the + authentication token, during which the token is considered expired and will + be refreshed. + :param default_timeout: If set, every request will have a default timeout. + """ + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + oauth_error_class = OAuthError + SESSION_REQUEST_PARAMS = ( + 'allow_redirects', 'timeout', 'cookies', 'files', + 'proxies', 'hooks', 'stream', 'verify', 'cert', 'json' + ) + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, state=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, leeway=60, default_timeout=None, **kwargs): + Session.__init__(self) + self.default_timeout = default_timeout + update_session_configure(self, kwargs) + + OAuth2Client.__init__( + self, session=self, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, state=state, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, leeway=leeway, **kwargs + ) + + def fetch_access_token(self, url=None, **kwargs): + """Alias for fetch_token.""" + return self.fetch_token(url, **kwargs) + + def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature (if available).""" + if self.default_timeout: + kwargs.setdefault('timeout', self.default_timeout) + if not withhold_token and auth is None: + if not self.token: + raise MissingTokenError() + auth = self.token_auth + return super().request( + method, url, auth=auth, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/utils.py b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/utils.py new file mode 100644 index 0000000..53a07db --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/requests_client/utils.py @@ -0,0 +1,10 @@ +REQUESTS_SESSION_KWARGS = [ + 'proxies', 'hooks', 'stream', 'verify', 'cert', + 'max_redirects', 'trust_env', +] + + +def update_session_configure(session, kwargs): + for k in REQUESTS_SESSION_KWARGS: + if k in kwargs: + setattr(session, k, kwargs.pop(k)) diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__init__.py new file mode 100644 index 0000000..1964aa1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__init__.py @@ -0,0 +1,17 @@ +from .client_mixin import OAuth2ClientMixin +from .tokens_mixins import OAuth2AuthorizationCodeMixin, OAuth2TokenMixin +from .functions import ( + create_query_client_func, + create_save_token_func, + create_query_token_func, + create_revocation_endpoint, + create_bearer_token_validator, +) + + +__all__ = [ + 'OAuth2ClientMixin', 'OAuth2AuthorizationCodeMixin', 'OAuth2TokenMixin', + 'create_query_client_func', 'create_save_token_func', + 'create_query_token_func', 'create_revocation_endpoint', + 'create_bearer_token_validator', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/__init__.cpython-312.pyc
b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ffb3ce2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/client_mixin.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/client_mixin.cpython-312.pyc new file mode 100644 index 0000000..69dc4dd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/client_mixin.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/functions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/functions.cpython-312.pyc new file mode 100644 index 0000000..b568bfd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/functions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/tokens_mixins.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/tokens_mixins.cpython-312.pyc new file mode 100644 index 0000000..eb4c675 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/__pycache__/tokens_mixins.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py new file mode 100644 index 0000000..28505cd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py @@ -0,0 +1,138 @@ +import secrets + +from sqlalchemy import Column, String, Text, Integer +from authlib.common.encoding import json_loads, json_dumps +from authlib.oauth2.rfc6749 import ClientMixin +from authlib.oauth2.rfc6749 import scope_to_list, list_to_scope + + +class OAuth2ClientMixin(ClientMixin): + client_id = Column(String(48), index=True) + client_secret = Column(String(120)) + client_id_issued_at = Column(Integer, nullable=False, default=0) + client_secret_expires_at = Column(Integer, nullable=False, default=0) + _client_metadata = Column('client_metadata', Text) + + @property + def client_info(self): + """Implementation for Client Info in OAuth 2.0 Dynamic Client + Registration Protocol via `Section 3.2.1`_. + + .. 
_`Section 3.2.1`: https://tools.ietf.org/html/rfc7591#section-3.2.1 + """ + return dict( + client_id=self.client_id, + client_secret=self.client_secret, + client_id_issued_at=self.client_id_issued_at, + client_secret_expires_at=self.client_secret_expires_at, + ) + + @property + def client_metadata(self): + if 'client_metadata' in self.__dict__: + return self.__dict__['client_metadata'] + if self._client_metadata: + data = json_loads(self._client_metadata) + self.__dict__['client_metadata'] = data + return data + return {} + + def set_client_metadata(self, value): + self._client_metadata = json_dumps(value) + if 'client_metadata' in self.__dict__: + del self.__dict__['client_metadata'] + + @property + def redirect_uris(self): + return self.client_metadata.get('redirect_uris', []) + + @property + def token_endpoint_auth_method(self): + return self.client_metadata.get( + 'token_endpoint_auth_method', + 'client_secret_basic' + ) + + @property + def grant_types(self): + return self.client_metadata.get('grant_types', []) + + @property + def response_types(self): + return self.client_metadata.get('response_types', []) + + @property + def client_name(self): + return self.client_metadata.get('client_name') + + @property + def client_uri(self): + return self.client_metadata.get('client_uri') + + @property + def logo_uri(self): + return self.client_metadata.get('logo_uri') + + @property + def scope(self): + return self.client_metadata.get('scope', '') + + @property + def contacts(self): + return self.client_metadata.get('contacts', []) + + @property + def tos_uri(self): + return self.client_metadata.get('tos_uri') + + @property + def policy_uri(self): + return self.client_metadata.get('policy_uri') + + @property + def jwks_uri(self): + return self.client_metadata.get('jwks_uri') + + @property + def jwks(self): + return self.client_metadata.get('jwks', []) + + @property + def software_id(self): + return self.client_metadata.get('software_id') + + @property + def software_version(self): + return self.client_metadata.get('software_version') + + def get_client_id(self): + return self.client_id + + def get_default_redirect_uri(self): + if self.redirect_uris: + return self.redirect_uris[0] + + def get_allowed_scope(self, scope): + if not scope: + return '' + allowed = set(self.scope.split()) + scopes = scope_to_list(scope) + return list_to_scope([s for s in scopes if s in allowed]) + + def check_redirect_uri(self, redirect_uri): + return redirect_uri in self.redirect_uris + + def check_client_secret(self, client_secret): + return secrets.compare_digest(self.client_secret, client_secret) + + def check_endpoint_auth_method(self, method, endpoint): + if endpoint == 'token': + return self.token_endpoint_auth_method == method + # TODO + return True + + def check_response_type(self, response_type): + return response_type in self.response_types + + def check_grant_type(self, grant_type): + return grant_type in self.grant_types diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/functions.py b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/functions.py new file mode 100644 index 0000000..74f1071 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/functions.py @@ -0,0 +1,101 @@ +import time + + +def create_query_client_func(session, client_model): + """Create a ``query_client`` function that can be used in the authorization + server.
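+
+    For example, with SQLAlchemy models (an illustrative sketch; the ``db``
+    and ``Client`` names are assumptions)::
+
+        query_client = create_query_client_func(db.session, Client)
+        server = AuthorizationServer(app, query_client=query_client)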
+ + :param session: SQLAlchemy session + :param client_model: Client model class + """ + def query_client(client_id): + q = session.query(client_model) + return q.filter_by(client_id=client_id).first() + return query_client + + +def create_save_token_func(session, token_model): + """Create a ``save_token`` function that can be used in the authorization + server. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + def save_token(token, request): + if request.user: + user_id = request.user.get_user_id() + else: + user_id = None + client = request.client + item = token_model( + client_id=client.client_id, + user_id=user_id, + **token + ) + session.add(item) + session.commit() + return save_token + + +def create_query_token_func(session, token_model): + """Create a ``query_token`` function for revocation and introspection + token endpoints. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + def query_token(token, token_type_hint): + q = session.query(token_model) + if token_type_hint == 'access_token': + return q.filter_by(access_token=token).first() + elif token_type_hint == 'refresh_token': + return q.filter_by(refresh_token=token).first() + # without token_type_hint + item = q.filter_by(access_token=token).first() + if item: + return item + return q.filter_by(refresh_token=token).first() + return query_token + + +def create_revocation_endpoint(session, token_model): + """Create a revocation endpoint class with SQLAlchemy session + and token model. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + from authlib.oauth2.rfc7009 import RevocationEndpoint + query_token = create_query_token_func(session, token_model) + + class _RevocationEndpoint(RevocationEndpoint): + def query_token(self, token, token_type_hint): + return query_token(token, token_type_hint) + + def revoke_token(self, token, request): + now = int(time.time()) + hint = request.form.get('token_type_hint') + token.access_token_revoked_at = now + if hint != 'access_token': + token.refresh_token_revoked_at = now + session.add(token) + session.commit() + + return _RevocationEndpoint + + +def create_bearer_token_validator(session, token_model): + """Create a bearer token validator class with a SQLAlchemy session + and token model.
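+
+    For example (an illustrative sketch; ``db`` and ``Token`` are assumed
+    model names)::
+
+        bearer_cls = create_bearer_token_validator(db.session, Token)
+        require_oauth.register_token_validator(bearer_cls())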
+ + :param session: SQLAlchemy session + :param token_model: Token model class + """ + from authlib.oauth2.rfc6750 import BearerTokenValidator + + class _BearerTokenValidator(BearerTokenValidator): + def authenticate_token(self, token_string): + q = session.query(token_model) + return q.filter_by(access_token=token_string).first() + + return _BearerTokenValidator diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py new file mode 100644 index 0000000..28cee89 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py @@ -0,0 +1,70 @@ +import time +from sqlalchemy import Column, String, Text, Integer +from authlib.oauth2.rfc6749 import ( + TokenMixin, + AuthorizationCodeMixin, +) + + +class OAuth2AuthorizationCodeMixin(AuthorizationCodeMixin): + code = Column(String(120), unique=True, nullable=False) + client_id = Column(String(48)) + redirect_uri = Column(Text, default='') + response_type = Column(Text, default='') + scope = Column(Text, default='') + nonce = Column(Text) + auth_time = Column( + Integer, nullable=False, + default=lambda: int(time.time()) + ) + + code_challenge = Column(Text) + code_challenge_method = Column(String(48)) + + def is_expired(self): + return self.auth_time + 300 < time.time() + + def get_redirect_uri(self): + return self.redirect_uri + + def get_scope(self): + return self.scope + + def get_auth_time(self): + return self.auth_time + + def get_nonce(self): + return self.nonce + + +class OAuth2TokenMixin(TokenMixin): + client_id = Column(String(48)) + token_type = Column(String(40)) + access_token = Column(String(255), unique=True, nullable=False) + refresh_token = Column(String(255), index=True) + scope = Column(Text, default='') + issued_at = Column( + Integer, nullable=False, default=lambda: int(time.time()) + ) + access_token_revoked_at = Column(Integer, nullable=False, default=0) + refresh_token_revoked_at = Column(Integer, nullable=False, default=0) + expires_in = Column(Integer, nullable=False, default=0) + + def check_client(self, client): + return self.client_id == client.get_client_id() + + def get_scope(self): + return self.scope + + def get_expires_in(self): + return self.expires_in + + def is_revoked(self): + return self.access_token_revoked_at or self.refresh_token_revoked_at + + def is_expired(self): + if not self.expires_in: + return False + + expires_at = self.issued_at + self.expires_in + return expires_at < time.time() diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__init__.py b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__init__.py new file mode 100644 index 0000000..7546c54 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__init__.py @@ -0,0 +1,22 @@ +# flake8: noqa + +from ..base_client import BaseOAuth, OAuthError +from .integration import StarletteIntegration +from .apps import StarletteOAuth1App, StarletteOAuth2App + + +class OAuth(BaseOAuth): + oauth1_client_cls = StarletteOAuth1App + oauth2_client_cls = StarletteOAuth2App + framework_integration_cls = StarletteIntegration + + def __init__(self, config=None, cache=None, fetch_token=None, update_token=None): + super().__init__( + cache=cache, fetch_token=fetch_token, update_token=update_token) + self.config = config + + +__all__ = [ + 'OAuth', 'OAuthError', + 'StarletteIntegration', 'StarletteOAuth1App', 
'StarletteOAuth2App', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2b99caf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/apps.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/apps.cpython-312.pyc new file mode 100644 index 0000000..233ae12 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/apps.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/integration.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/integration.cpython-312.pyc new file mode 100644 index 0000000..47a6c7e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/__pycache__/integration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/apps.py b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/apps.py new file mode 100644 index 0000000..114cbaf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/apps.py @@ -0,0 +1,86 @@ +from starlette.datastructures import URL +from starlette.responses import RedirectResponse +from ..base_client import OAuthError +from ..base_client import BaseApp +from ..base_client.async_app import AsyncOAuth1Mixin, AsyncOAuth2Mixin +from ..base_client.async_openid import AsyncOpenIDMixin +from ..httpx_client import AsyncOAuth1Client, AsyncOAuth2Client + + +class StarletteAppMixin: + async def save_authorize_data(self, request, **kwargs): + state = kwargs.pop('state', None) + if state: + if self.framework.cache: + session = None + else: + session = request.session + await self.framework.set_state_data(session, state, kwargs) + else: + raise RuntimeError('Missing state value') + + async def authorize_redirect(self, request, redirect_uri=None, **kwargs): + """Create an HTTP redirect for the authorization endpoint. + + :param request: HTTP request instance from Starlette view. + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: An HTTP redirect response.
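+
+        A typical Starlette login route looks like this (an illustrative
+        sketch; the registered client name ``myclient`` and the ``auth``
+        route name are assumptions)::
+
+            @app.route('/login')
+            async def login(request):
+                redirect_uri = request.url_for('auth')
+                return await oauth.myclient.authorize_redirect(request, redirect_uri)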
+ """ + + # Handle Starlette >= 0.26.0 where redirect_uri may now be a URL and not a string + if redirect_uri and isinstance(redirect_uri, URL): + redirect_uri = str(redirect_uri) + rv = await self.create_authorization_url(redirect_uri, **kwargs) + await self.save_authorize_data(request, redirect_uri=redirect_uri, **rv) + return RedirectResponse(rv['url'], status_code=302) + + +class StarletteOAuth1App(StarletteAppMixin, AsyncOAuth1Mixin, BaseApp): + client_cls = AsyncOAuth1Client + + async def authorize_access_token(self, request, **kwargs): + params = dict(request.query_params) + state = params.get('oauth_token') + if not state: + raise OAuthError(description='Missing "oauth_token" parameter') + + data = await self.framework.get_state_data(request.session, state) + if not data: + raise OAuthError(description='Missing "request_token" in temporary data') + + params['request_token'] = data['request_token'] + params.update(kwargs) + await self.framework.clear_state_data(request.session, state) + return await self.fetch_access_token(**params) + + +class StarletteOAuth2App(StarletteAppMixin, AsyncOAuth2Mixin, AsyncOpenIDMixin, BaseApp): + client_cls = AsyncOAuth2Client + + async def authorize_access_token(self, request, **kwargs): + error = request.query_params.get('error') + if error: + description = request.query_params.get('error_description') + raise OAuthError(error=error, description=description) + + params = { + 'code': request.query_params.get('code'), + 'state': request.query_params.get('state'), + } + + if self.framework.cache: + session = None + else: + session = request.session + + claims_options = kwargs.pop('claims_options', None) + state_data = await self.framework.get_state_data(session, params.get('state')) + await self.framework.clear_state_data(session, params.get('state')) + params = self._format_state_params(state_data, params) + token = await self.fetch_access_token(**params, **kwargs) + + if 'id_token' in token and 'nonce' in state_data: + userinfo = await self.parse_id_token(token, nonce=state_data['nonce'], claims_options=claims_options) + token['userinfo'] = userinfo + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/integration.py b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/integration.py new file mode 100644 index 0000000..a92c8e3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/integrations/starlette_client/integration.py @@ -0,0 +1,71 @@ +import json +import time +from typing import ( + Any, + Dict, + Hashable, + Optional, +) + +from ..base_client import FrameworkIntegration + + +class StarletteIntegration(FrameworkIntegration): + async def _get_cache_data(self, key: Hashable): + value = await self.cache.get(key) + if not value: + return None + try: + return json.loads(value) + except (TypeError, ValueError): + return None + + async def get_state_data(self, session: Optional[Dict[str, Any]], state: str) -> Dict[str, Any]: + key = f'_state_{self.name}_{state}' + if self.cache: + value = await self._get_cache_data(key) + elif session is not None: + value = session.get(key) + else: + value = None + + if value: + return value.get('data') + return None + + async def set_state_data(self, session: Optional[Dict[str, Any]], state: str, data: Any): + key_prefix = f'_state_{self.name}_' + key = f'{key_prefix}{state}' + if self.cache: + await self.cache.set(key, json.dumps({'data': data}), self.expires_in) + elif session is not None: + # clear old state data to avoid session size 
growing + for old_key in list(session.keys()): + if old_key.startswith(key_prefix): + session.pop(old_key) + now = time.time() + session[key] = {'data': data, 'exp': now + self.expires_in} + + async def clear_state_data(self, session: Optional[Dict[str, Any]], state: str): + key = f'_state_{self.name}_{state}' + if self.cache: + await self.cache.delete(key) + elif session is not None: + session.pop(key, None) + self._clear_session_state(session) + + def update_token(self, token, refresh_token=None, access_token=None): + pass + + @staticmethod + def load_config(oauth, name, params): + if not oauth.config: + return {} + + rv = {} + for k in params: + conf_key = f'{name}_{k}'.upper() + v = oauth.config.get(conf_key, default=None) + if v is not None: + rv[k] = v + return rv diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/__init__.py new file mode 100644 index 0000000..2d6638a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/__init__.py @@ -0,0 +1,60 @@ +""" + authlib.jose + ~~~~~~~~~~~~ + + JOSE implementation in Authlib. Tracking the status of JOSE specs at + https://tools.ietf.org/wg/jose/ +""" +from .rfc7515 import ( + JsonWebSignature, JWSAlgorithm, JWSHeader, JWSObject, +) +from .rfc7516 import ( + JsonWebEncryption, JWEAlgorithm, JWEEncAlgorithm, JWEZipAlgorithm, +) +from .rfc7517 import Key, KeySet, JsonWebKey +from .rfc7518 import ( + register_jws_rfc7518, + register_jwe_rfc7518, + ECDHESAlgorithm, + OctKey, + RSAKey, + ECKey, +) +from .rfc7519 import JsonWebToken, BaseClaims, JWTClaims +from .rfc8037 import OKPKey, register_jws_rfc8037 + +from .errors import JoseError + +# register algorithms +register_jws_rfc7518(JsonWebSignature) +register_jws_rfc8037(JsonWebSignature) + +register_jwe_rfc7518(JsonWebEncryption) + +# attach algorithms +ECDHESAlgorithm.ALLOWED_KEY_CLS = (ECKey, OKPKey) + +# register supported keys +JsonWebKey.JWK_KEY_CLS = { + OctKey.kty: OctKey, + RSAKey.kty: RSAKey, + ECKey.kty: ECKey, + OKPKey.kty: OKPKey, +} + +jwt = JsonWebToken(list(JsonWebSignature.ALGORITHMS_REGISTRY.keys())) + + +__all__ = [ + 'JoseError', + + 'JsonWebSignature', 'JWSAlgorithm', 'JWSHeader', 'JWSObject', + 'JsonWebEncryption', 'JWEAlgorithm', 'JWEEncAlgorithm', 'JWEZipAlgorithm', + + 'JsonWebKey', 'Key', 'KeySet', + + 'OctKey', 'RSAKey', 'ECKey', 'OKPKey', + + 'JsonWebToken', 'BaseClaims', 'JWTClaims', + 'jwt', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..61d3d4c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..021026f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/jwk.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/jwk.cpython-312.pyc new file mode 100644 index 0000000..defedd1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/jwk.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..a42bde3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__init__.py new file mode 100644 index 0000000..3044585 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__init__.py @@ -0,0 +1,17 @@ +from ._jwe_algorithms import JWE_DRAFT_ALG_ALGORITHMS +from ._jwe_enc_cryptography import C20PEncAlgorithm +try: + from ._jwe_enc_cryptodome import XC20PEncAlgorithm +except ImportError: + XC20PEncAlgorithm = None + + +def register_jwe_draft(cls): + for alg in JWE_DRAFT_ALG_ALGORITHMS: + cls.register_algorithm(alg) + + cls.register_algorithm(C20PEncAlgorithm(256)) # C20P + if XC20PEncAlgorithm is not None: + cls.register_algorithm(XC20PEncAlgorithm(256)) # XC20P + +__all__ = ['register_jwe_draft'] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5dfae96 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_algorithms.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_algorithms.cpython-312.pyc new file mode 100644 index 0000000..3b2fb88 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_algorithms.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptodome.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptodome.cpython-312.pyc new file mode 100644 index 0000000..bcb56fb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptodome.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptography.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptography.cpython-312.pyc new file mode 100644 index 0000000..01ebfe5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/__pycache__/_jwe_enc_cryptography.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_algorithms.py b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_algorithms.py new file mode 100644 index 0000000..c01b7e7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_algorithms.py @@ -0,0 +1,188 @@ +import struct +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash + +from authlib.jose.errors import InvalidEncryptionAlgorithmForECDH1PUWithKeyWrappingError +from authlib.jose.rfc7516 import JWEAlgorithmWithTagAwareKeyAgreement +from authlib.jose.rfc7518 import AESAlgorithm, CBCHS2EncAlgorithm, ECKey, u32be_len_input +from authlib.jose.rfc8037 import OKPKey + + +class 
ECDH1PUAlgorithm(JWEAlgorithmWithTagAwareKeyAgreement): + EXTRA_HEADERS = ['epk', 'apu', 'apv', 'skid'] + ALLOWED_KEY_CLS = (ECKey, OKPKey) + + # https://datatracker.ietf.org/doc/html/draft-madden-jose-ecdh-1pu-04 + def __init__(self, key_size=None): + if key_size is None: + self.name = 'ECDH-1PU' + self.description = 'ECDH-1PU in the Direct Key Agreement mode' + else: + self.name = f'ECDH-1PU+A{key_size}KW' + self.description = ( + 'ECDH-1PU using Concat KDF and CEK wrapped ' + 'with A{}KW').format(key_size) + self.key_size = key_size + self.aeskw = AESAlgorithm(key_size) + + def prepare_key(self, raw_data): + if isinstance(raw_data, self.ALLOWED_KEY_CLS): + return raw_data + return ECKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + epk = self._generate_ephemeral_key(key) + h = self._prepare_headers(epk) + preset = {'epk': epk, 'header': h} + if self.key_size is not None: + cek = enc_alg.generate_cek() + preset['cek'] = cek + return preset + + def compute_shared_key(self, shared_key_e, shared_key_s): + return shared_key_e + shared_key_s + + def compute_fixed_info(self, headers, bit_size, tag): + if tag is None: + cctag = b'' + else: + cctag = u32be_len_input(tag) + + # AlgorithmID + if self.key_size is None: + alg_id = u32be_len_input(headers['enc']) + else: + alg_id = u32be_len_input(headers['alg']) + + # PartyUInfo + apu_info = u32be_len_input(headers.get('apu'), True) + + # PartyVInfo + apv_info = u32be_len_input(headers.get('apv'), True) + + # SuppPubInfo + pub_info = struct.pack('>I', bit_size) + cctag + + return alg_id + apu_info + apv_info + pub_info + + def compute_derived_key(self, shared_key, fixed_info, bit_size): + ckdf = ConcatKDFHash( + algorithm=hashes.SHA256(), + length=bit_size // 8, + otherinfo=fixed_info, + backend=default_backend() + ) + return ckdf.derive(shared_key) + + def deliver_at_sender(self, sender_static_key, sender_ephemeral_key, recipient_pubkey, headers, bit_size, tag): + shared_key_s = sender_static_key.exchange_shared_key(recipient_pubkey) + shared_key_e = sender_ephemeral_key.exchange_shared_key(recipient_pubkey) + shared_key = self.compute_shared_key(shared_key_e, shared_key_s) + + fixed_info = self.compute_fixed_info(headers, bit_size, tag) + + return self.compute_derived_key(shared_key, fixed_info, bit_size) + + def deliver_at_recipient(self, recipient_key, sender_static_pubkey, sender_ephemeral_pubkey, headers, bit_size, tag): + shared_key_s = recipient_key.exchange_shared_key(sender_static_pubkey) + shared_key_e = recipient_key.exchange_shared_key(sender_ephemeral_pubkey) + shared_key = self.compute_shared_key(shared_key_e, shared_key_s) + + fixed_info = self.compute_fixed_info(headers, bit_size, tag) + + return self.compute_derived_key(shared_key, fixed_info, bit_size) + + def _generate_ephemeral_key(self, key): + return key.generate_key(key['crv'], is_private=True) + + def _prepare_headers(self, epk): + # REQUIRED_JSON_FIELDS contains only public fields + pub_epk = {k: epk[k] for k in epk.REQUIRED_JSON_FIELDS} + pub_epk['kty'] = epk.kty + return {'epk': pub_epk} + + def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None): + if not isinstance(enc_alg, CBCHS2EncAlgorithm): + raise InvalidEncryptionAlgorithmForECDH1PUWithKeyWrappingError() + + if preset and 'epk' in preset: + epk = preset['epk'] + h = {} + else: + epk = self._generate_ephemeral_key(key) + h = self._prepare_headers(epk) + + if preset and 'cek' in preset: + cek = preset['cek'] + else: + cek = enc_alg.generate_cek() + + return {'epk': 
epk, 'cek': cek, 'header': h} + + def _agree_upon_key_at_sender(self, enc_alg, headers, key, sender_key, epk, tag=None): + if self.key_size is None: + bit_size = enc_alg.CEK_SIZE + else: + bit_size = self.key_size + + public_key = key.get_op_key('wrapKey') + + return self.deliver_at_sender(sender_key, epk, public_key, headers, bit_size, tag) + + def _wrap_cek(self, cek, dk): + kek = self.aeskw.prepare_key(dk) + return self.aeskw.wrap_cek(cek, kek) + + def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag): + dk = self._agree_upon_key_at_sender(enc_alg, headers, key, sender_key, epk, tag) + return self._wrap_cek(cek, dk) + + def wrap(self, enc_alg, headers, key, sender_key, preset=None): + # In this class this method is used in direct key agreement mode only + if self.key_size is not None: + raise RuntimeError('Invalid algorithm state detected') + + if preset and 'epk' in preset: + epk = preset['epk'] + h = {} + else: + epk = self._generate_ephemeral_key(key) + h = self._prepare_headers(epk) + + dk = self._agree_upon_key_at_sender(enc_alg, headers, key, sender_key, epk) + + return {'ek': b'', 'cek': dk, 'header': h} + + def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None): + if 'epk' not in headers: + raise ValueError('Missing "epk" in headers') + + if self.key_size is None: + bit_size = enc_alg.CEK_SIZE + else: + bit_size = self.key_size + + sender_pubkey = sender_key.get_op_key('wrapKey') + epk = key.import_key(headers['epk']) + epk_pubkey = epk.get_op_key('wrapKey') + dk = self.deliver_at_recipient(key, sender_pubkey, epk_pubkey, headers, bit_size, tag) + + if self.key_size is None: + return dk + + kek = self.aeskw.prepare_key(dk) + return self.aeskw.unwrap(enc_alg, ek, headers, kek) + + +JWE_DRAFT_ALG_ALGORITHMS = [ + ECDH1PUAlgorithm(None), # ECDH-1PU + ECDH1PUAlgorithm(128), # ECDH-1PU+A128KW + ECDH1PUAlgorithm(192), # ECDH-1PU+A192KW + ECDH1PUAlgorithm(256), # ECDH-1PU+A256KW +] + + +def register_jwe_alg_draft(cls): + for alg in JWE_DRAFT_ALG_ALGORITHMS: + cls.register_algorithm(alg) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptodome.py b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptodome.py new file mode 100644 index 0000000..cb6fcea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptodome.py @@ -0,0 +1,52 @@ +""" + authlib.jose.draft + ~~~~~~~~~~~~~~~~~~~~ + + Content Encryption per `Section 4`_. + + .. _`Section 4`: https://datatracker.ietf.org/doc/html/draft-amringer-jose-chacha-02#section-4 +""" +from authlib.jose.rfc7516 import JWEEncAlgorithm +from Cryptodome.Cipher import ChaCha20_Poly1305 as Cryptodome_ChaCha20_Poly1305 + + +class XC20PEncAlgorithm(JWEEncAlgorithm): + # Use of an IV of size 192 bits is REQUIRED with this algorithm. 
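+ # (that is, a 24-byte nonce: the extended nonce that XChaCha20 is defined over)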
+ # https://datatracker.ietf.org/doc/html/draft-amringer-jose-chacha-02#section-4.1 + IV_SIZE = 192 + + def __init__(self, key_size): + self.name = 'XC20P' + self.description = 'XChaCha20-Poly1305' + self.key_size = key_size + self.CEK_SIZE = key_size + + def encrypt(self, msg, aad, iv, key): + """Content Encryption with AEAD_XCHACHA20_POLY1305 + + :param msg: text to be encrypted in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encryption key in bytes + :return: (ciphertext, tag) + """ + self.check_iv(iv) + chacha = Cryptodome_ChaCha20_Poly1305.new(key=key, nonce=iv) + chacha.update(aad) + ciphertext, tag = chacha.encrypt_and_digest(msg) + return ciphertext, tag + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Content Decryption with AEAD_XCHACHA20_POLY1305 + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encryption key in bytes + :return: message + """ + self.check_iv(iv) + chacha = Cryptodome_ChaCha20_Poly1305.new(key=key, nonce=iv) + chacha.update(aad) + return chacha.decrypt_and_verify(ciphertext, tag) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py new file mode 100644 index 0000000..1b0c852 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py @@ -0,0 +1,50 @@ +""" + authlib.jose.draft + ~~~~~~~~~~~~~~~~~~~~ + + Content Encryption per `Section 4`_. + + .. _`Section 4`: https://datatracker.ietf.org/doc/html/draft-amringer-jose-chacha-02#section-4 + """ +from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 +from authlib.jose.rfc7516 import JWEEncAlgorithm + + +class C20PEncAlgorithm(JWEEncAlgorithm): + # Use of an IV of size 96 bits is REQUIRED with this algorithm.
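+ # (that is, a 12-byte nonce: the standard ChaCha20-Poly1305 nonce size from RFC 8439)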
+ # https://datatracker.ietf.org/doc/html/draft-amringer-jose-chacha-02#section-4.1 + IV_SIZE = 96 + + def __init__(self, key_size): + self.name = 'C20P' + self.description = 'ChaCha20-Poly1305' + self.key_size = key_size + self.CEK_SIZE = key_size + + def encrypt(self, msg, aad, iv, key): + """Content Encryption with AEAD_CHACHA20_POLY1305 + + :param msg: text to be encrypted in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encryption key in bytes + :return: (ciphertext, tag) + """ + self.check_iv(iv) + chacha = ChaCha20Poly1305(key) + ciphertext = chacha.encrypt(iv, msg, aad) + return ciphertext[:-16], ciphertext[-16:] + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Content Decryption with AEAD_CHACHA20_POLY1305 + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encryption key in bytes + :return: message + """ + self.check_iv(iv) + chacha = ChaCha20Poly1305(key) + return chacha.decrypt(iv, ciphertext + tag, aad) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/errors.py b/myenv/lib/python3.12/site-packages/authlib/jose/errors.py new file mode 100644 index 0000000..fb02eb4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/errors.py @@ -0,0 +1,113 @@ +from authlib.common.errors import AuthlibBaseError + + +class JoseError(AuthlibBaseError): + pass + + +class DecodeError(JoseError): + error = 'decode_error' + + +class MissingAlgorithmError(JoseError): + error = 'missing_algorithm' + + +class UnsupportedAlgorithmError(JoseError): + error = 'unsupported_algorithm' + + +class BadSignatureError(JoseError): + error = 'bad_signature' + + def __init__(self, result): + super().__init__() + self.result = result + + +class InvalidHeaderParameterNameError(JoseError): + error = 'invalid_header_parameter_name' + + def __init__(self, name): + description = f'Invalid Header Parameter Name: {name}' + super().__init__( + description=description) + + +class InvalidEncryptionAlgorithmForECDH1PUWithKeyWrappingError(JoseError): + error = 'invalid_encryption_algorithm_for_ECDH_1PU_with_key_wrapping' + + def __init__(self): + description = 'In key agreement with key wrapping mode, the ECDH-1PU algorithm ' \ + 'only supports the AES_CBC_HMAC_SHA2 family of encryption algorithms' + super().__init__( + description=description) + + +class InvalidAlgorithmForMultipleRecipientsMode(JoseError): + error = 'invalid_algorithm_for_multiple_recipients_mode' + + def __init__(self, alg): + description = f'{alg} algorithm cannot be used in multiple recipients mode' + super().__init__( + description=description) + + +class KeyMismatchError(JoseError): + error = 'key_mismatch_error' + description = 'Key does not match any recipient' + + +class MissingEncryptionAlgorithmError(JoseError): + error = 'missing_encryption_algorithm' + description = 'Missing "enc" in header' + + +class UnsupportedEncryptionAlgorithmError(JoseError): + error = 'unsupported_encryption_algorithm' + description = 'Unsupported "enc" value in header' + + +class UnsupportedCompressionAlgorithmError(JoseError): + error = 'unsupported_compression_algorithm' + description = 'Unsupported "zip" value in header' + + +class InvalidUseError(JoseError): + error = 'invalid_use' + description = 'Key "use" is not valid for your usage' + + +class InvalidClaimError(JoseError): + error = 'invalid_claim' + + def __init__(self, claim): +
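# keep the offending claim name so callers can inspect it programmatically +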
self.claim_name = claim + description = f'Invalid claim "{claim}"' + super().__init__(description=description) + + +class MissingClaimError(JoseError): + error = 'missing_claim' + + def __init__(self, claim): + description = f'Missing "{claim}" claim' + super().__init__(description=description) + + +class InsecureClaimError(JoseError): + error = 'insecure_claim' + + def __init__(self, claim): + description = f'Insecure claim "{claim}"' + super().__init__(description=description) + + +class ExpiredTokenError(JoseError): + error = 'expired_token' + description = 'The token is expired' + + +class InvalidTokenError(JoseError): + error = 'invalid_token' + description = 'The token is not valid yet' diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/jwk.py b/myenv/lib/python3.12/site-packages/authlib/jose/jwk.py new file mode 100644 index 0000000..bc3b6eb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/jwk.py @@ -0,0 +1,19 @@ +from authlib.deprecate import deprecate +from .rfc7517 import JsonWebKey + + +def loads(obj, kid=None): + deprecate('Please use ``JsonWebKey`` directly.') + key_set = JsonWebKey.import_key_set(obj) + if key_set: + return key_set.find_by_kid(kid) + return JsonWebKey.import_key(obj) + + +def dumps(key, kty=None, **params): + deprecate('Please use ``JsonWebKey`` directly.') + if kty: + params['kty'] = kty + + key = JsonWebKey.import_key(key, params) + return dict(key) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__init__.py new file mode 100644 index 0000000..5f8e0f5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__init__.py @@ -0,0 +1,18 @@ +""" + authlib.jose.rfc7515 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Signature (JWS). 
+ + https://tools.ietf.org/html/rfc7515 +""" + +from .jws import JsonWebSignature +from .models import JWSAlgorithm, JWSHeader, JWSObject + + +__all__ = [ + 'JsonWebSignature', + 'JWSAlgorithm', 'JWSHeader', 'JWSObject' +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d255fab Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/jws.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/jws.cpython-312.pyc new file mode 100644 index 0000000..21b9c47 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/jws.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..3d2fbf9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/jws.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/jws.py new file mode 100644 index 0000000..cf19c4b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/jws.py @@ -0,0 +1,304 @@ +from authlib.common.encoding import ( + to_bytes, + to_unicode, + urlsafe_b64encode, + json_b64encode, +) +from authlib.jose.util import ( + extract_header, + extract_segment, ensure_dict, +) +from authlib.jose.errors import ( + DecodeError, + MissingAlgorithmError, + UnsupportedAlgorithmError, + BadSignatureError, + InvalidHeaderParameterNameError, +) +from .models import JWSHeader, JWSObject + + +class JsonWebSignature: + + #: Registered Header Parameter Names defined by Section 4.1 + REGISTERED_HEADER_PARAMETER_NAMES = frozenset([ + 'alg', 'jku', 'jwk', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256', + 'typ', 'cty', 'crit' + ]) + + #: Defined available JWS algorithms in the registry + ALGORITHMS_REGISTRY = {} + + def __init__(self, algorithms=None, private_headers=None): + self._private_headers = private_headers + self._algorithms = algorithms + + @classmethod + def register_algorithm(cls, algorithm): + if not algorithm or algorithm.algorithm_type != 'JWS': + raise ValueError( + f'Invalid algorithm for JWS, {algorithm!r}') + cls.ALGORITHMS_REGISTRY[algorithm.name] = algorithm + + def serialize_compact(self, protected, payload, key): + """Generate a JWS Compact Serialization. The JWS Compact Serialization + represents digitally signed or MACed content as a compact, URL-safe + string, per `Section 7.1`_. + + .. code-block:: text + + BASE64URL(UTF8(JWS Protected Header)) || '.' || + BASE64URL(JWS Payload) || '.' 
|| + BASE64URL(JWS Signature) + + :param protected: A dict of protected header + :param payload: A bytes/string of payload + :param key: Private key used to generate signature + :return: bytes + """ + jws_header = JWSHeader(protected, None) + self._validate_private_headers(protected) + algorithm, key = self._prepare_algorithm_key(protected, payload, key) + + protected_segment = json_b64encode(jws_header.protected) + payload_segment = urlsafe_b64encode(to_bytes(payload)) + + # calculate signature + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = urlsafe_b64encode(algorithm.sign(signing_input, key)) + return b'.'.join([protected_segment, payload_segment, signature]) + + def deserialize_compact(self, s, key, decode=None): + """Extract JWS Compact Serialization, and validate with the given key. + If key is not provided, the returned dict will contain the signature + and signing input values. Via `Section 7.1`_. + + :param s: text of JWS Compact Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: JWSObject + :raise: BadSignatureError + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc7515#section-7.1 + """ + try: + s = to_bytes(s) + signing_input, signature_segment = s.rsplit(b'.', 1) + protected_segment, payload_segment = signing_input.split(b'.', 1) + except ValueError: + raise DecodeError('Not enough segments') + + protected = _extract_header(protected_segment) + jws_header = JWSHeader(protected, None) + + payload = _extract_payload(payload_segment) + if decode: + payload = decode(payload) + + signature = _extract_signature(signature_segment) + rv = JWSObject(jws_header, payload, 'compact') + algorithm, key = self._prepare_algorithm_key(jws_header, payload, key) + if algorithm.verify(signing_input, signature, key): + return rv + raise BadSignatureError(rv) + + def serialize_json(self, header_obj, payload, key): + """Generate a JWS JSON Serialization. The JWS JSON Serialization + represents digitally signed or MACed content as a JSON object, + per `Section 7.2`_. + + :param header_obj: A dict/list of header + :param payload: A string/dict of payload + :param key: Private key used to generate signature + :return: dict + + Example ``header_obj`` of JWS JSON Serialization:: + + { + "protected": {"alg": "HS256"}, + "header": {"kid": "jose"} + } + + Pass a dict to generate a flattened JSON Serialization, pass a list of + header dicts to generate the standard JSON Serialization. + """ + payload_segment = json_b64encode(payload) + + def _sign(jws_header): + self._validate_private_headers(jws_header) + _alg, _key = self._prepare_algorithm_key(jws_header, payload, key) + + protected_segment = json_b64encode(jws_header.protected) + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = urlsafe_b64encode(_alg.sign(signing_input, _key)) + + rv = { + 'protected': to_unicode(protected_segment), + 'signature': to_unicode(signature) + } + if jws_header.header is not None: + rv['header'] = jws_header.header + return rv + + if isinstance(header_obj, dict): + data = _sign(JWSHeader.from_dict(header_obj)) + data['payload'] = to_unicode(payload_segment) + return data + + signatures = [_sign(JWSHeader.from_dict(h)) for h in header_obj] + return { + 'payload': to_unicode(payload_segment), + 'signatures': signatures + } + + def deserialize_json(self, obj, key, decode=None): + """Extract JWS JSON Serialization, and validate with the given key.
+ If key is not provided, it will return a dict without signature + verification. Header will still be validated. Via `Section 7.2`_. + + :param obj: text of JWS JSON Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: JWSObject + :raise: BadSignatureError + + .. _`Section 7.2`: https://tools.ietf.org/html/rfc7515#section-7.2 + """ + obj = ensure_dict(obj, 'JWS') + + payload_segment = obj.get('payload') + if payload_segment is None: + raise DecodeError('Missing "payload" value') + + payload_segment = to_bytes(payload_segment) + payload = _extract_payload(payload_segment) + if decode: + payload = decode(payload) + + if 'signatures' not in obj: + # flattened JSON JWS + jws_header, valid = self._validate_json_jws( + payload_segment, payload, obj, key) + + rv = JWSObject(jws_header, payload, 'flat') + if valid: + return rv + raise BadSignatureError(rv) + + headers = [] + is_valid = True + for header_obj in obj['signatures']: + jws_header, valid = self._validate_json_jws( + payload_segment, payload, header_obj, key) + headers.append(jws_header) + if not valid: + is_valid = False + + rv = JWSObject(headers, payload, 'json') + if is_valid: + return rv + raise BadSignatureError(rv) + + def serialize(self, header, payload, key): + """Generate a JWS Serialization. It will automatically generate a + Compact or JSON Serialization depending on the given header. If a + header is in a JSON header format, it will call + :meth:`serialize_json`, otherwise it will call + :meth:`serialize_compact`. + + :param header: A dict/list of header + :param payload: A string/dict of payload + :param key: Private key used to generate signature + :return: byte/dict + """ + if isinstance(header, (list, tuple)): + return self.serialize_json(header, payload, key) + if 'protected' in header: + return self.serialize_json(header, payload, key) + return self.serialize_compact(header, payload, key) + + def deserialize(self, s, key, decode=None): + """Deserialize JWS Serialization, both compact and JSON format. + It will automatically deserialize depending on the given JWS. + + :param s: text of JWS Compact/JSON Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: dict + :raise: BadSignatureError + + If key is not provided, it will still deserialize the serialization + without verification. 
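+ + A round-trip sketch (not from this module: the ``oct`` key is a made-up + example value, and HS256 must already be registered, as ``authlib.jose`` + does on import):: + + jws = JsonWebSignature() + key = {'kty': 'oct', 'k': 'c2VjcmV0'} + data = jws.serialize({'alg': 'HS256'}, b'hello', key) + obj = jws.deserialize(data, key) + assert obj.payload == b'hello'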
+ """ + if isinstance(s, dict): + return self.deserialize_json(s, key, decode) + + s = to_bytes(s) + if s.startswith(b'{') and s.endswith(b'}'): + return self.deserialize_json(s, key, decode) + return self.deserialize_compact(s, key, decode) + + def _prepare_algorithm_key(self, header, payload, key): + if 'alg' not in header: + raise MissingAlgorithmError() + + alg = header['alg'] + if self._algorithms is not None and alg not in self._algorithms: + raise UnsupportedAlgorithmError() + if alg not in self.ALGORITHMS_REGISTRY: + raise UnsupportedAlgorithmError() + + algorithm = self.ALGORITHMS_REGISTRY[alg] + if callable(key): + key = key(header, payload) + elif key is None and 'jwk' in header: + key = header['jwk'] + key = algorithm.prepare_key(key) + return algorithm, key + + def _validate_private_headers(self, header): + # only validate private headers when developers set + # private headers explicitly + if self._private_headers is not None: + names = self.REGISTERED_HEADER_PARAMETER_NAMES.copy() + names = names.union(self._private_headers) + + for k in header: + if k not in names: + raise InvalidHeaderParameterNameError(k) + + def _validate_json_jws(self, payload_segment, payload, header_obj, key): + protected_segment = header_obj.get('protected') + if not protected_segment: + raise DecodeError('Missing "protected" value') + + signature_segment = header_obj.get('signature') + if not signature_segment: + raise DecodeError('Missing "signature" value') + + protected_segment = to_bytes(protected_segment) + protected = _extract_header(protected_segment) + header = header_obj.get('header') + if header and not isinstance(header, dict): + raise DecodeError('Invalid "header" value') + + jws_header = JWSHeader(protected, header) + algorithm, key = self._prepare_algorithm_key(jws_header, payload, key) + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = _extract_signature(to_bytes(signature_segment)) + if algorithm.verify(signing_input, signature, key): + return jws_header, True + return jws_header, False + + +def _extract_header(header_segment): + return extract_header(header_segment, DecodeError) + + +def _extract_signature(signature_segment): + return extract_segment(signature_segment, DecodeError, 'signature') + + +def _extract_payload(payload_segment): + return extract_segment(payload_segment, DecodeError, 'payload') diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/models.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/models.py new file mode 100644 index 0000000..5da3c7e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7515/models.py @@ -0,0 +1,81 @@ +class JWSAlgorithm: + """Interface for JWS algorithm. JWA specification (RFC7518) SHOULD + implement the algorithms for JWS with this base implementation. + """ + name = None + description = None + algorithm_type = 'JWS' + algorithm_location = 'alg' + + def prepare_key(self, raw_data): + """Prepare key for signing and verifying signature.""" + raise NotImplementedError() + + def sign(self, msg, key): + """Sign the text msg with a private/sign key. + + :param msg: message bytes to be signed + :param key: private key to sign the message + :return: bytes + """ + raise NotImplementedError + + def verify(self, msg, sig, key): + """Verify the signature of text msg with a public/verify key. 
+ + :param msg: message bytes to be signed + :param sig: signature bytes to be verified + :param key: public key to verify the signature + :return: boolean + """ + raise NotImplementedError + + +class JWSHeader(dict): + """Header object for JWS. It combines the protected header and the + unprotected header into one dict; JWSHeader itself is that combined dict, e.g. + + >>> protected = {'alg': 'HS256'} + >>> header = {'kid': 'a'} + >>> jws_header = JWSHeader(protected, header) + >>> print(jws_header) + {'alg': 'HS256', 'kid': 'a'} + >>> jws_header.protected == protected + True + >>> jws_header.header == header + True + + :param protected: dict of protected header + :param header: dict of unprotected header + """ + def __init__(self, protected, header): + obj = {} + if protected: + obj.update(protected) + if header: + obj.update(header) + super().__init__(obj) + self.protected = protected + self.header = header + + @classmethod + def from_dict(cls, obj): + if isinstance(obj, cls): + return obj + return cls(obj.get('protected'), obj.get('header')) + + +class JWSObject(dict): + """A dict instance to represent a JWS object.""" + def __init__(self, header, payload, type='compact'): + super().__init__( + header=header, + payload=payload, + ) + self.header = header + self.payload = payload + self.type = type + + @property + def headers(self): + """Alias of ``header`` for JSON typed JWS.""" + if self.type == 'json': + return self['header'] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__init__.py new file mode 100644 index 0000000..4a02433 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__init__.py @@ -0,0 +1,18 @@ +""" + authlib.jose.rfc7516 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Encryption (JWE).
+ + https://tools.ietf.org/html/rfc7516 +""" + +from .jwe import JsonWebEncryption +from .models import JWEAlgorithm, JWEAlgorithmWithTagAwareKeyAgreement, JWEEncAlgorithm, JWEZipAlgorithm + + +__all__ = [ + 'JsonWebEncryption', + 'JWEAlgorithm', 'JWEAlgorithmWithTagAwareKeyAgreement', 'JWEEncAlgorithm', 'JWEZipAlgorithm' +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..67aba80 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/jwe.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/jwe.cpython-312.pyc new file mode 100644 index 0000000..42d826f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/jwe.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..8cb2f60 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/jwe.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/jwe.py new file mode 100644 index 0000000..084bcca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/jwe.py @@ -0,0 +1,722 @@ +from collections import OrderedDict +from copy import deepcopy + +from authlib.common.encoding import ( + to_bytes, urlsafe_b64encode, json_b64encode, to_unicode +) +from authlib.jose.rfc7516.models import JWEAlgorithmWithTagAwareKeyAgreement, JWESharedHeader, JWEHeader +from authlib.jose.util import ( + extract_header, + extract_segment, ensure_dict, +) +from authlib.jose.errors import ( + DecodeError, + MissingAlgorithmError, + UnsupportedAlgorithmError, + MissingEncryptionAlgorithmError, + UnsupportedEncryptionAlgorithmError, + UnsupportedCompressionAlgorithmError, + InvalidHeaderParameterNameError, InvalidAlgorithmForMultipleRecipientsMode, KeyMismatchError, +) + + +class JsonWebEncryption: + #: Registered Header Parameter Names defined by Section 4.1 + REGISTERED_HEADER_PARAMETER_NAMES = frozenset([ + 'alg', 'enc', 'zip', + 'jku', 'jwk', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256', + 'typ', 'cty', 'crit' + ]) + + ALG_REGISTRY = {} + ENC_REGISTRY = {} + ZIP_REGISTRY = {} + + def __init__(self, algorithms=None, private_headers=None): + self._algorithms = algorithms + self._private_headers = private_headers + + @classmethod + def register_algorithm(cls, algorithm): + """Register an algorithm for ``alg`` or ``enc`` or ``zip`` of JWE.""" + if not algorithm or algorithm.algorithm_type != 'JWE': + raise ValueError( + f'Invalid algorithm for JWE, {algorithm!r}') + + if algorithm.algorithm_location == 'alg': + cls.ALG_REGISTRY[algorithm.name] = algorithm + elif algorithm.algorithm_location == 'enc': + cls.ENC_REGISTRY[algorithm.name] = algorithm + elif algorithm.algorithm_location == 'zip': + cls.ZIP_REGISTRY[algorithm.name] = algorithm + + def serialize_compact(self, protected, payload, key, sender_key=None): + """Generate a JWE Compact Serialization. 
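+ + A usage sketch (not from this module: assumes the RFC 7518 algorithms + are registered, which ``authlib.jose`` does on import):: + + import os + from authlib.jose import JsonWebEncryption + + jwe = JsonWebEncryption() + protected = {'alg': 'dir', 'enc': 'A256GCM'} + key = os.urandom(32) # a hypothetical 256-bit direct key + data = jwe.serialize_compact(protected, b'hello', key)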
+ + The JWE Compact Serialization represents encrypted content as a compact, + URL-safe string. This string is:: + + BASE64URL(UTF8(JWE Protected Header)) || '.' || + BASE64URL(JWE Encrypted Key) || '.' || + BASE64URL(JWE Initialization Vector) || '.' || + BASE64URL(JWE Ciphertext) || '.' || + BASE64URL(JWE Authentication Tag) + + Only one recipient is supported by the JWE Compact Serialization and + it provides no syntax to represent JWE Shared Unprotected Header, JWE + Per-Recipient Unprotected Header, or JWE AAD values. + + :param protected: A dict of protected header + :param payload: Payload (bytes or a value convertible to bytes) + :param key: Public key used to encrypt payload + :param sender_key: Sender's private key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: JWE compact serialization as bytes + """ + + # step 1: Prepare algorithms & key + alg = self.get_header_alg(protected) + enc = self.get_header_enc(protected) + zip_alg = self.get_header_zip(protected) + + self._validate_sender_key(sender_key, alg) + self._validate_private_headers(protected, alg) + + key = prepare_key(alg, protected, key) + if sender_key is not None: + sender_key = alg.prepare_key(sender_key) + + # self._post_validate_header(protected, algorithm) + + # step 2: Generate a random Content Encryption Key (CEK) + # use enc_alg.generate_cek() in scope of upcoming .wrap or .generate_keys_and_prepare_headers call + + # step 3: Encrypt the CEK with the recipient's public key + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement) and alg.key_size is not None: + # For a JWE algorithm with tag-aware key agreement in case key agreement with key wrapping mode is used: + # Defer key agreement with key wrapping until authentication tag is computed + prep = alg.generate_keys_and_prepare_headers(enc, key, sender_key) + epk = prep['epk'] + cek = prep['cek'] + protected.update(prep['header']) + else: + # In any other case: + # Keep the normal steps order defined by RFC 7516 + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement): + wrapped = alg.wrap(enc, protected, key, sender_key) + else: + wrapped = alg.wrap(enc, protected, key) + cek = wrapped['cek'] + ek = wrapped['ek'] + if 'header' in wrapped: + protected.update(wrapped['header']) + + # step 4: Generate a random JWE Initialization Vector + iv = enc.generate_iv() + + # step 5: Let the Additional Authenticated Data encryption parameter + # be ASCII(BASE64URL(UTF8(JWE Protected Header))) + protected_segment = json_b64encode(protected) + aad = to_bytes(protected_segment, 'ascii') + + # step 6: compress message if required + if zip_alg: + msg = zip_alg.compress(to_bytes(payload)) + else: + msg = to_bytes(payload) + + # step 7: perform encryption + ciphertext, tag = enc.encrypt(msg, aad, iv, cek) + + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement) and alg.key_size is not None: + # For a JWE algorithm with tag-aware key agreement in case key agreement with key wrapping mode is used: + # Perform key agreement with key wrapping deferred at step 3 + wrapped = alg.agree_upon_key_and_wrap_cek(enc, protected, key, sender_key, epk, cek, tag) + ek = wrapped['ek'] + + # step 8: build resulting message + return b'.'.join([ + protected_segment, + urlsafe_b64encode(ek), + urlsafe_b64encode(iv), + urlsafe_b64encode(ciphertext), + urlsafe_b64encode(tag) + ]) + + def serialize_json(self, header_obj, payload, keys, sender_key=None): + """Generate a JWE JSON Serialization (in fully general syntax). 
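+ + A typical call, with placeholder key names (see the ``header_obj`` + example further down):: + + obj = jwe.serialize_json(header_obj, b'payload', [bob_key, charlie_key], sender_key=alice_key)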
+ + The JWE JSON Serialization represents encrypted content as a JSON + object. This representation is neither optimized for compactness nor + URL safe. + + The following members are defined for use in top-level JSON objects + used for the fully general JWE JSON Serialization syntax: + + protected + The "protected" member MUST be present and contain the value + BASE64URL(UTF8(JWE Protected Header)) when the JWE Protected + Header value is non-empty; otherwise, it MUST be absent. These + Header Parameter values are integrity protected. + + unprotected + The "unprotected" member MUST be present and contain the value JWE + Shared Unprotected Header when the JWE Shared Unprotected Header + value is non-empty; otherwise, it MUST be absent. This value is + represented as an unencoded JSON object, rather than as a string. + These Header Parameter values are not integrity protected. + + iv + The "iv" member MUST be present and contain the value + BASE64URL(JWE Initialization Vector) when the JWE Initialization + Vector value is non-empty; otherwise, it MUST be absent. + + aad + The "aad" member MUST be present and contain the value + BASE64URL(JWE AAD)) when the JWE AAD value is non-empty; + otherwise, it MUST be absent. A JWE AAD value can be included to + supply a base64url-encoded value to be integrity protected but not + encrypted. + + ciphertext + The "ciphertext" member MUST be present and contain the value + BASE64URL(JWE Ciphertext). + + tag + The "tag" member MUST be present and contain the value + BASE64URL(JWE Authentication Tag) when the JWE Authentication Tag + value is non-empty; otherwise, it MUST be absent. + + recipients + The "recipients" member value MUST be an array of JSON objects. + Each object contains information specific to a single recipient. + This member MUST be present with exactly one array element per + recipient, even if some or all of the array element values are the + empty JSON object "{}" (which can happen when all Header Parameter + values are shared between all recipients and when no encrypted key + is used, such as when doing Direct Encryption). + + The following members are defined for use in the JSON objects that + are elements of the "recipients" array: + + header + The "header" member MUST be present and contain the value JWE Per- + Recipient Unprotected Header when the JWE Per-Recipient + Unprotected Header value is non-empty; otherwise, it MUST be + absent. This value is represented as an unencoded JSON object, + rather than as a string. These Header Parameter values are not + integrity protected. + + encrypted_key + The "encrypted_key" member MUST be present and contain the value + BASE64URL(JWE Encrypted Key) when the JWE Encrypted Key value is + non-empty; otherwise, it MUST be absent. + + This implementation assumes that "alg" and "enc" header fields are + contained in the protected or shared unprotected header. 
+ + :param header_obj: A dict of headers (in addition optionally contains JWE AAD) + :param payload: Payload (bytes or a value convertible to bytes) + :param keys: Public keys (or a single public key) used to encrypt payload + :param sender_key: Sender's private key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: JWE JSON serialization (in fully general syntax) as dict + + Example of `header_obj`:: + + { + "protected": { + "alg": "ECDH-1PU+A128KW", + "enc": "A256CBC-HS512", + "apu": "QWxpY2U", + "apv": "Qm9iIGFuZCBDaGFybGll" + }, + "unprotected": { + "jku": "https://alice.example.com/keys.jwks" + }, + "recipients": [ + { + "header": { + "kid": "bob-key-2" + } + }, + { + "header": { + "kid": "2021-05-06" + } + } + ], + "aad": b'Authenticate me too.' + } + """ + if not isinstance(keys, list): # single key + keys = [keys] + + if not keys: + raise ValueError("No keys have been provided") + + header_obj = deepcopy(header_obj) + + shared_header = JWESharedHeader.from_dict(header_obj) + + recipients = header_obj.get('recipients') + if recipients is None: + recipients = [{} for _ in keys] + for i in range(len(recipients)): + if recipients[i] is None: + recipients[i] = {} + if 'header' not in recipients[i]: + recipients[i]['header'] = {} + + jwe_aad = header_obj.get('aad') + + if len(keys) != len(recipients): + raise ValueError("Count of recipient keys {} does not equal to count of recipients {}" + .format(len(keys), len(recipients))) + + # step 1: Prepare algorithms & key + alg = self.get_header_alg(shared_header) + enc = self.get_header_enc(shared_header) + zip_alg = self.get_header_zip(shared_header) + + self._validate_sender_key(sender_key, alg) + self._validate_private_headers(shared_header, alg) + for recipient in recipients: + self._validate_private_headers(recipient['header'], alg) + + for i in range(len(keys)): + keys[i] = prepare_key(alg, recipients[i]['header'], keys[i]) + if sender_key is not None: + sender_key = alg.prepare_key(sender_key) + + # self._post_validate_header(protected, algorithm) + + # step 2: Generate a random Content Encryption Key (CEK) + # use enc_alg.generate_cek() in scope of upcoming .wrap or .generate_keys_and_prepare_headers call + + # step 3: Encrypt the CEK with the recipient's public key + preset = alg.generate_preset(enc, keys[0]) + if 'cek' in preset: + cek = preset['cek'] + else: + cek = None + if len(keys) > 1 and cek is None: + raise InvalidAlgorithmForMultipleRecipientsMode(alg.name) + if 'header' in preset: + shared_header.update_protected(preset['header']) + + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement) and alg.key_size is not None: + # For a JWE algorithm with tag-aware key agreement in case key agreement with key wrapping mode is used: + # Defer key agreement with key wrapping until authentication tag is computed + epks = [] + for i in range(len(keys)): + prep = alg.generate_keys_and_prepare_headers(enc, keys[i], sender_key, preset) + if cek is None: + cek = prep['cek'] + epks.append(prep['epk']) + recipients[i]['header'].update(prep['header']) + else: + # In any other case: + # Keep the normal steps order defined by RFC 7516 + for i in range(len(keys)): + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement): + wrapped = alg.wrap(enc, shared_header, keys[i], sender_key, preset) + else: + wrapped = alg.wrap(enc, shared_header, keys[i], preset) + if cek is None: + cek = wrapped['cek'] + recipients[i]['encrypted_key'] = wrapped['ek'] + if 'header' in wrapped: + recipients[i]['header'].update(wrapped['header']) + + # 
step 4: Generate a random JWE Initialization Vector + iv = enc.generate_iv() + + # step 5: Compute the Encoded Protected Header value + # BASE64URL(UTF8(JWE Protected Header)). If the JWE Protected Header + # is not present, let this value be the empty string. + # Let the Additional Authenticated Data encryption parameter be + # ASCII(Encoded Protected Header). However, if a JWE AAD value is + # present, instead let the Additional Authenticated Data encryption + # parameter be ASCII(Encoded Protected Header || '.' || BASE64URL(JWE AAD)). + aad = json_b64encode(shared_header.protected) if shared_header.protected else b'' + if jwe_aad is not None: + aad += b'.' + urlsafe_b64encode(jwe_aad) + aad = to_bytes(aad, 'ascii') + + # step 6: compress message if required + if zip_alg: + msg = zip_alg.compress(to_bytes(payload)) + else: + msg = to_bytes(payload) + + # step 7: perform encryption + ciphertext, tag = enc.encrypt(msg, aad, iv, cek) + + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement) and alg.key_size is not None: + # For a JWE algorithm with tag-aware key agreement in case key agreement with key wrapping mode is used: + # Perform key agreement with key wrapping deferred at step 3 + for i in range(len(keys)): + wrapped = alg.agree_upon_key_and_wrap_cek(enc, shared_header, keys[i], sender_key, epks[i], cek, tag) + recipients[i]['encrypted_key'] = wrapped['ek'] + + # step 8: build resulting message + obj = OrderedDict() + + if shared_header.protected: + obj['protected'] = to_unicode(json_b64encode(shared_header.protected)) + + if shared_header.unprotected: + obj['unprotected'] = shared_header.unprotected + + for recipient in recipients: + if not recipient['header']: + del recipient['header'] + recipient['encrypted_key'] = to_unicode(urlsafe_b64encode(recipient['encrypted_key'])) + for member in set(recipient.keys()): + if member not in {'header', 'encrypted_key'}: + del recipient[member] + obj['recipients'] = recipients + + if jwe_aad is not None: + obj['aad'] = to_unicode(urlsafe_b64encode(jwe_aad)) + + obj['iv'] = to_unicode(urlsafe_b64encode(iv)) + + obj['ciphertext'] = to_unicode(urlsafe_b64encode(ciphertext)) + + obj['tag'] = to_unicode(urlsafe_b64encode(tag)) + + return obj + + def serialize(self, header, payload, key, sender_key=None): + """Generate a JWE Serialization. + + It will automatically generate a compact or JSON serialization depending + on `header` argument. If `header` is a dict with "protected", + "unprotected" and/or "recipients" keys, it will call `serialize_json`, + otherwise it will call `serialize_compact`. + + :param header: A dict of header(s) + :param payload: Payload (bytes or a value convertible to bytes) + :param key: Public key(s) used to encrypt payload + :param sender_key: Sender's private key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: JWE compact serialization as bytes or + JWE JSON serialization as dict + """ + if 'protected' in header or 'unprotected' in header or 'recipients' in header: + return self.serialize_json(header, payload, key, sender_key) + + return self.serialize_compact(header, payload, key, sender_key) + + def deserialize_compact(self, s, key, decode=None, sender_key=None): + """Extract JWE Compact Serialization. 
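+ + For example, reversing the ``serialize_compact`` sketch above:: + + obj = jwe.deserialize_compact(data, key) + assert obj['payload'] == b'hello'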
+ + :param s: JWE Compact Serialization as bytes + :param key: Private key used to decrypt payload + (optionally can be a tuple of kid and essentially key) + :param decode: Function to decode payload data + :param sender_key: Sender's public key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: dict with `header` and `payload` keys where `header` value is + a dict containing protected header fields + """ + try: + s = to_bytes(s) + protected_s, ek_s, iv_s, ciphertext_s, tag_s = s.rsplit(b'.') + except ValueError: + raise DecodeError('Not enough segments') + + protected = extract_header(protected_s, DecodeError) + ek = extract_segment(ek_s, DecodeError, 'encryption key') + iv = extract_segment(iv_s, DecodeError, 'initialization vector') + ciphertext = extract_segment(ciphertext_s, DecodeError, 'ciphertext') + tag = extract_segment(tag_s, DecodeError, 'authentication tag') + + alg = self.get_header_alg(protected) + enc = self.get_header_enc(protected) + zip_alg = self.get_header_zip(protected) + + self._validate_sender_key(sender_key, alg) + self._validate_private_headers(protected, alg) + + if isinstance(key, tuple) and len(key) == 2: + # Ignore separately provided kid, extract essentially key only + key = key[1] + + key = prepare_key(alg, protected, key) + + if sender_key is not None: + sender_key = alg.prepare_key(sender_key) + + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement): + # For a JWE algorithm with tag-aware key agreement: + if alg.key_size is not None: + # In case key agreement with key wrapping mode is used: + # Provide authentication tag to .unwrap method + cek = alg.unwrap(enc, ek, protected, key, sender_key, tag) + else: + # Otherwise, don't provide authentication tag to .unwrap method + cek = alg.unwrap(enc, ek, protected, key, sender_key) + else: + # For any other JWE algorithm: + # Don't provide authentication tag to .unwrap method + cek = alg.unwrap(enc, ek, protected, key) + + aad = to_bytes(protected_s, 'ascii') + msg = enc.decrypt(ciphertext, aad, iv, tag, cek) + + if zip_alg: + payload = zip_alg.decompress(to_bytes(msg)) + else: + payload = msg + + if decode: + payload = decode(payload) + return {'header': protected, 'payload': payload} + + def deserialize_json(self, obj, key, decode=None, sender_key=None): + """Extract JWE JSON Serialization. 
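+ + The private key may also be passed as a ``(kid, key)`` tuple to select + the matching recipient explicitly, e.g. (placeholder values):: + + jwe.deserialize_json(obj, ('bob-key-2', bob_private_key))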
+ + :param obj: JWE JSON Serialization as dict or str + :param key: Private key used to decrypt payload + (optionally can be a tuple of kid and essentially key) + :param decode: Function to decode payload data + :param sender_key: Sender's public key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: dict with `header` and `payload` keys where `header` value is + a dict containing `protected`, `unprotected`, `recipients` and/or + `aad` keys + """ + obj = ensure_dict(obj, 'JWE') + obj = deepcopy(obj) + + if 'protected' in obj: + protected = extract_header(to_bytes(obj['protected']), DecodeError) + else: + protected = None + + unprotected = obj.get('unprotected') + + recipients = obj['recipients'] + for recipient in recipients: + if 'header' not in recipient: + recipient['header'] = {} + recipient['encrypted_key'] = extract_segment( + to_bytes(recipient['encrypted_key']), DecodeError, 'encrypted key') + + if 'aad' in obj: + jwe_aad = extract_segment(to_bytes(obj['aad']), DecodeError, 'JWE AAD') + else: + jwe_aad = None + + iv = extract_segment(to_bytes(obj['iv']), DecodeError, 'initialization vector') + + ciphertext = extract_segment(to_bytes(obj['ciphertext']), DecodeError, 'ciphertext') + + tag = extract_segment(to_bytes(obj['tag']), DecodeError, 'authentication tag') + + shared_header = JWESharedHeader(protected, unprotected) + + alg = self.get_header_alg(shared_header) + enc = self.get_header_enc(shared_header) + zip_alg = self.get_header_zip(shared_header) + + self._validate_sender_key(sender_key, alg) + self._validate_private_headers(shared_header, alg) + for recipient in recipients: + self._validate_private_headers(recipient['header'], alg) + + kid = None + if isinstance(key, tuple) and len(key) == 2: + # Extract separately provided kid and essentially key + kid = key[0] + key = key[1] + + key = alg.prepare_key(key) + + if kid is None: + # If kid has not been provided separately, try to get it from key itself + kid = key.kid + + if sender_key is not None: + sender_key = alg.prepare_key(sender_key) + + def _unwrap_with_sender_key_and_tag(ek, header): + return alg.unwrap(enc, ek, header, key, sender_key, tag) + + def _unwrap_with_sender_key_and_without_tag(ek, header): + return alg.unwrap(enc, ek, header, key, sender_key) + + def _unwrap_without_sender_key_and_tag(ek, header): + return alg.unwrap(enc, ek, header, key) + + def _unwrap_for_matching_recipient(unwrap_func): + if kid is not None: + for recipient in recipients: + if recipient['header'].get('kid') == kid: + header = JWEHeader(protected, unprotected, recipient['header']) + return unwrap_func(recipient['encrypted_key'], header) + + # Since no explicit match has been found, iterate over all the recipients + error = None + for recipient in recipients: + header = JWEHeader(protected, unprotected, recipient['header']) + try: + return unwrap_func(recipient['encrypted_key'], header) + except Exception as e: + error = e + else: + if error is None: + raise KeyMismatchError() + else: + raise error + + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement): + # For a JWE algorithm with tag-aware key agreement: + if alg.key_size is not None: + # In case key agreement with key wrapping mode is used: + # Provide authentication tag to .unwrap method + cek = _unwrap_for_matching_recipient(_unwrap_with_sender_key_and_tag) + else: + # Otherwise, don't provide authentication tag to .unwrap method + cek = _unwrap_for_matching_recipient(_unwrap_with_sender_key_and_without_tag) + else: + # For any other JWE algorithm: + # Don't 
provide authentication tag to .unwrap method + cek = _unwrap_for_matching_recipient(_unwrap_without_sender_key_and_tag) + + aad = to_bytes(obj.get('protected', '')) + if 'aad' in obj: + aad += b'.' + to_bytes(obj['aad']) + aad = to_bytes(aad, 'ascii') + + msg = enc.decrypt(ciphertext, aad, iv, tag, cek) + + if zip_alg: + payload = zip_alg.decompress(to_bytes(msg)) + else: + payload = msg + + if decode: + payload = decode(payload) + + for recipient in recipients: + if not recipient['header']: + del recipient['header'] + for member in set(recipient.keys()): + if member != 'header': + del recipient[member] + + header = {} + if protected: + header['protected'] = protected + if unprotected: + header['unprotected'] = unprotected + header['recipients'] = recipients + if jwe_aad is not None: + header['aad'] = jwe_aad + + return { + 'header': header, + 'payload': payload + } + + def deserialize(self, obj, key, decode=None, sender_key=None): + """Extract a JWE Serialization. + + It supports both compact and JSON serialization. + + :param obj: JWE compact serialization as bytes or + JWE JSON serialization as dict or str + :param key: Private key used to decrypt payload + (optionally can be a tuple of kid and essentially key) + :param decode: Function to decode payload data + :param sender_key: Sender's public key in case + JWEAlgorithmWithTagAwareKeyAgreement is used + :return: dict with `header` and `payload` keys + """ + if isinstance(obj, dict): + return self.deserialize_json(obj, key, decode, sender_key) + + obj = to_bytes(obj) + if obj.startswith(b'{') and obj.endswith(b'}'): + return self.deserialize_json(obj, key, decode, sender_key) + + return self.deserialize_compact(obj, key, decode, sender_key) + + @staticmethod + def parse_json(obj): + """Parse JWE JSON Serialization. 
+ + :param obj: JWE JSON Serialization as str or dict + :return: Parsed JWE JSON Serialization as dict if `obj` is an str, + or `obj` as is if `obj` is already a dict + """ + return ensure_dict(obj, 'JWE') + + def get_header_alg(self, header): + if 'alg' not in header: + raise MissingAlgorithmError() + + alg = header['alg'] + if self._algorithms is not None and alg not in self._algorithms: + raise UnsupportedAlgorithmError() + if alg not in self.ALG_REGISTRY: + raise UnsupportedAlgorithmError() + return self.ALG_REGISTRY[alg] + + def get_header_enc(self, header): + if 'enc' not in header: + raise MissingEncryptionAlgorithmError() + enc = header['enc'] + if self._algorithms is not None and enc not in self._algorithms: + raise UnsupportedEncryptionAlgorithmError() + if enc not in self.ENC_REGISTRY: + raise UnsupportedEncryptionAlgorithmError() + return self.ENC_REGISTRY[enc] + + def get_header_zip(self, header): + if 'zip' in header: + z = header['zip'] + if self._algorithms is not None and z not in self._algorithms: + raise UnsupportedCompressionAlgorithmError() + if z not in self.ZIP_REGISTRY: + raise UnsupportedCompressionAlgorithmError() + return self.ZIP_REGISTRY[z] + + def _validate_sender_key(self, sender_key, alg): + if isinstance(alg, JWEAlgorithmWithTagAwareKeyAgreement): + if sender_key is None: + raise ValueError("{} algorithm requires sender_key but passed sender_key value is None" + .format(alg.name)) + else: + if sender_key is not None: + raise ValueError("{} algorithm does not use sender_key but passed sender_key value is not None" + .format(alg.name)) + + def _validate_private_headers(self, header, alg): + # only validate private headers when developers set + # private headers explicitly + if self._private_headers is None: + return + + names = self.REGISTERED_HEADER_PARAMETER_NAMES.copy() + names = names.union(self._private_headers) + + if alg.EXTRA_HEADERS: + names = names.union(alg.EXTRA_HEADERS) + + for k in header: + if k not in names: + raise InvalidHeaderParameterNameError(k) + + +def prepare_key(alg, header, key): + if callable(key): + key = key(header, None) + elif key is None and 'jwk' in header: + key = header['jwk'] + return alg.prepare_key(key) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/models.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/models.py new file mode 100644 index 0000000..279563c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7516/models.py @@ -0,0 +1,148 @@ +import os +from abc import ABCMeta + + +class JWEAlgorithmBase(metaclass=ABCMeta): + """Base interface for all JWE algorithms. + """ + EXTRA_HEADERS = None + + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'alg' + + def prepare_key(self, raw_data): + raise NotImplementedError + + def generate_preset(self, enc_alg, key): + raise NotImplementedError + + +class JWEAlgorithm(JWEAlgorithmBase, metaclass=ABCMeta): + """Interface for JWE algorithm conforming to RFC7518. + JWA specification (RFC7518) SHOULD implement the algorithms for JWE with this base implementation. + """ + def wrap(self, enc_alg, headers, key, preset=None): + raise NotImplementedError + + def unwrap(self, enc_alg, ek, headers, key): + raise NotImplementedError + + +class JWEAlgorithmWithTagAwareKeyAgreement(JWEAlgorithmBase, metaclass=ABCMeta): + """Interface for JWE algorithm with tag-aware key agreement (in key agreement with key wrapping mode). + ECDH-1PU is an example of such an algorithm. 
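+ + "Tag-aware" means the authentication tag feeds the key-derivation step, + so the CEK can only be wrapped after the content has been encrypted; the + serialize methods above defer the key wrap for exactly that reason.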
+ """ + def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key, preset=None): + raise NotImplementedError + + def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag): + raise NotImplementedError + + def wrap(self, enc_alg, headers, key, sender_key, preset=None): + raise NotImplementedError + + def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None): + raise NotImplementedError + + +class JWEEncAlgorithm: + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'enc' + + IV_SIZE = None + CEK_SIZE = None + + def generate_cek(self): + return os.urandom(self.CEK_SIZE // 8) + + def generate_iv(self): + return os.urandom(self.IV_SIZE // 8) + + def check_iv(self, iv): + if len(iv) * 8 != self.IV_SIZE: + raise ValueError('Invalid "iv" size') + + def encrypt(self, msg, aad, iv, key): + """Encrypt the given "msg" text. + + :param msg: text to be encrypt in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encrypted key in bytes + :return: (ciphertext, tag) + """ + raise NotImplementedError + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Decrypt the given cipher text. + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encrypted key in bytes + :return: message + """ + raise NotImplementedError + + +class JWEZipAlgorithm: + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'zip' + + def compress(self, s): + raise NotImplementedError + + def decompress(self, s): + raise NotImplementedError + + +class JWESharedHeader(dict): + """Shared header object for JWE. + + Combines protected header and shared unprotected header together. + """ + def __init__(self, protected, unprotected): + obj = {} + if protected: + obj.update(protected) + if unprotected: + obj.update(unprotected) + super().__init__(obj) + self.protected = protected if protected else {} + self.unprotected = unprotected if unprotected else {} + + def update_protected(self, addition): + self.update(addition) + self.protected.update(addition) + + @classmethod + def from_dict(cls, obj): + if isinstance(obj, cls): + return obj + return cls(obj.get('protected'), obj.get('unprotected')) + + +class JWEHeader(dict): + """Header object for JWE. + + Combines protected header, shared unprotected header and specific recipient's unprotected header together. + """ + def __init__(self, protected, unprotected, header): + obj = {} + if protected: + obj.update(protected) + if unprotected: + obj.update(unprotected) + if header: + obj.update(header) + super().__init__(obj) + self.protected = protected if protected else {} + self.unprotected = unprotected if unprotected else {} + self.header = header if header else {} diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__init__.py new file mode 100644 index 0000000..d3fbbb2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__init__.py @@ -0,0 +1,17 @@ +""" + authlib.jose.rfc7517 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Key (JWK). 
+ + https://tools.ietf.org/html/rfc7517 +""" +from ._cryptography_key import load_pem_key +from .base_key import Key +from .asymmetric_key import AsymmetricKey +from .key_set import KeySet +from .jwk import JsonWebKey + + +__all__ = ['Key', 'AsymmetricKey', 'KeySet', 'JsonWebKey', 'load_pem_key'] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b25f75d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/_cryptography_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/_cryptography_key.cpython-312.pyc new file mode 100644 index 0000000..355a45b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/_cryptography_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/asymmetric_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/asymmetric_key.cpython-312.pyc new file mode 100644 index 0000000..e9288e9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/asymmetric_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/base_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/base_key.cpython-312.pyc new file mode 100644 index 0000000..6e7acca Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/base_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/jwk.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/jwk.cpython-312.pyc new file mode 100644 index 0000000..f28f994 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/jwk.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/key_set.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/key_set.cpython-312.pyc new file mode 100644 index 0000000..ef74902 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/__pycache__/key_set.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/_cryptography_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/_cryptography_key.py new file mode 100644 index 0000000..f7194a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/_cryptography_key.py @@ -0,0 +1,34 @@ +from cryptography.x509 import load_pem_x509_certificate +from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key, load_pem_public_key, load_ssh_public_key, +) +from cryptography.hazmat.backends import default_backend +from authlib.common.encoding import to_bytes + + +def load_pem_key(raw, ssh_type=None, key_type=None, password=None): + raw = to_bytes(raw) + + if ssh_type and raw.startswith(ssh_type): + return load_ssh_public_key(raw, backend=default_backend()) + + if key_type == 'public': + return load_pem_public_key(raw, backend=default_backend()) + + if key_type == 'private' or password is not None: + return 
load_pem_private_key(raw, password=password, backend=default_backend()) + + if b'PUBLIC' in raw: + return load_pem_public_key(raw, backend=default_backend()) + + if b'PRIVATE' in raw: + return load_pem_private_key(raw, password=password, backend=default_backend()) + + if b'CERTIFICATE' in raw: + cert = load_pem_x509_certificate(raw, default_backend()) + return cert.public_key() + + try: + return load_pem_private_key(raw, password=password, backend=default_backend()) + except ValueError: + return load_pem_public_key(raw, backend=default_backend()) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/asymmetric_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/asymmetric_key.py new file mode 100644 index 0000000..35b1937 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/asymmetric_key.py @@ -0,0 +1,191 @@ +from authlib.common.encoding import to_bytes +from cryptography.hazmat.primitives.serialization import ( + Encoding, PrivateFormat, PublicFormat, + BestAvailableEncryption, NoEncryption, +) +from ._cryptography_key import load_pem_key +from .base_key import Key + + +class AsymmetricKey(Key): + """This is the base class for a JSON Web Key.""" + PUBLIC_KEY_FIELDS = [] + PRIVATE_KEY_FIELDS = [] + PRIVATE_KEY_CLS = bytes + PUBLIC_KEY_CLS = bytes + SSH_PUBLIC_PREFIX = b'' + + def __init__(self, private_key=None, public_key=None, options=None): + super().__init__(options) + self.private_key = private_key + self.public_key = public_key + + @property + def public_only(self): + if self.private_key: + return False + if 'd' in self.tokens: + return False + return True + + def get_op_key(self, operation): + """Get the raw key for the given key_op. This method will also + check if the given key_op is supported by this key. + + :param operation: key operation value, such as "sign", "encrypt". 
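A short sketch of the auto-detection in load_pem_key() above. It assumes the cryptography package is available; the generated key is illustrative only.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from authlib.jose.rfc7517._cryptography_key import load_pem_key

pem = rsa.generate_private_key(public_exponent=65537, key_size=2048).private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
key = load_pem_key(pem)  # no hints given: matched by the b'PRIVATE' branch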
+ :return: raw key + """ + self.check_key_op(operation) + if operation in self.PUBLIC_KEY_OPS: + return self.get_public_key() + return self.get_private_key() + + def get_public_key(self): + if self.public_key: + return self.public_key + + private_key = self.get_private_key() + if private_key: + return private_key.public_key() + + return self.public_key + + def get_private_key(self): + if self.private_key: + return self.private_key + + if self.tokens: + self.load_raw_key() + return self.private_key + + def load_raw_key(self): + if 'd' in self.tokens: + self.private_key = self.load_private_key() + else: + self.public_key = self.load_public_key() + + def load_dict_key(self): + if self.private_key: + self._dict_data.update(self.dumps_private_key()) + else: + self._dict_data.update(self.dumps_public_key()) + + def dumps_private_key(self): + raise NotImplementedError() + + def dumps_public_key(self): + raise NotImplementedError() + + def load_private_key(self): + raise NotImplementedError() + + def load_public_key(self): + raise NotImplementedError() + + def as_dict(self, is_private=False, **params): + """Represent this key as a dict of the JSON Web Key.""" + tokens = self.tokens + if is_private and 'd' not in tokens: + raise ValueError('This is a public key') + + kid = tokens.get('kid') + if 'd' in tokens and not is_private: + # filter out private fields + tokens = {k: tokens[k] for k in tokens if k in self.PUBLIC_KEY_FIELDS} + tokens['kty'] = self.kty + if kid: + tokens['kid'] = kid + + if not kid: + tokens['kid'] = self.thumbprint() + + tokens.update(params) + return tokens + + def as_key(self, is_private=False): + """Represent this key as raw key.""" + if is_private: + return self.get_private_key() + return self.get_public_key() + + def as_bytes(self, encoding=None, is_private=False, password=None): + """Export key into PEM/DER format bytes. 
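On the as_dict() filtering above, a minimal sketch; it assumes a full authlib install, which registers the key classes with JsonWebKey:

from authlib.jose import JsonWebKey

k = JsonWebKey.generate_key('EC', 'P-256', is_private=True)
pub = k.as_dict()                       # private member 'd' is filtered out
assert 'd' not in pub and 'kid' in pub  # kid defaults to the RFC 7638 thumbprint
assert 'd' in k.as_dict(is_private=True)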
+ + :param encoding: "PEM" or "DER" + :param is_private: export private key or public key + :param password: encrypt private key with password + :return: bytes + """ + + if encoding is None or encoding == 'PEM': + encoding = Encoding.PEM + elif encoding == 'DER': + encoding = Encoding.DER + else: + raise ValueError(f'Invalid encoding: {encoding!r}') + + raw_key = self.as_key(is_private) + if is_private: + if not raw_key: + raise ValueError('This is a public key') + if password is None: + encryption_algorithm = NoEncryption() + else: + encryption_algorithm = BestAvailableEncryption(to_bytes(password)) + return raw_key.private_bytes( + encoding=encoding, + format=PrivateFormat.PKCS8, + encryption_algorithm=encryption_algorithm, + ) + return raw_key.public_bytes( + encoding=encoding, + format=PublicFormat.SubjectPublicKeyInfo, + ) + + def as_pem(self, is_private=False, password=None): + return self.as_bytes(is_private=is_private, password=password) + + def as_der(self, is_private=False, password=None): + return self.as_bytes(encoding='DER', is_private=is_private, password=password) + + @classmethod + def import_dict_key(cls, raw, options=None): + cls.check_required_fields(raw) + key = cls(options=options) + key._dict_data = raw + return key + + @classmethod + def import_key(cls, raw, options=None): + if isinstance(raw, cls): + if options is not None: + raw.options.update(options) + return raw + + if isinstance(raw, cls.PUBLIC_KEY_CLS): + key = cls(public_key=raw, options=options) + elif isinstance(raw, cls.PRIVATE_KEY_CLS): + key = cls(private_key=raw, options=options) + elif isinstance(raw, dict): + key = cls.import_dict_key(raw, options) + else: + if options is not None: + password = options.pop('password', None) + else: + password = None + raw_key = load_pem_key(raw, cls.SSH_PUBLIC_PREFIX, password=password) + if isinstance(raw_key, cls.PUBLIC_KEY_CLS): + key = cls(public_key=raw_key, options=options) + elif isinstance(raw_key, cls.PRIVATE_KEY_CLS): + key = cls(private_key=raw_key, options=options) + else: + raise ValueError('Invalid data for importing key') + return key + + @classmethod + def validate_raw_key(cls, key): + return isinstance(key, cls.PUBLIC_KEY_CLS) or isinstance(key, cls.PRIVATE_KEY_CLS) + + @classmethod + def generate_key(cls, crv_or_size, options=None, is_private=False): + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/base_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/base_key.py new file mode 100644 index 0000000..1afe8d4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/base_key.py @@ -0,0 +1,118 @@ +import hashlib +from collections import OrderedDict +from authlib.common.encoding import ( + json_dumps, + to_bytes, + to_unicode, + urlsafe_b64encode, +) +from ..errors import InvalidUseError + + +class Key: + """This is the base class for a JSON Web Key.""" + kty = '_' + + ALLOWED_PARAMS = [ + 'use', 'key_ops', 'alg', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256' + ] + + PRIVATE_KEY_OPS = [ + 'sign', 'decrypt', 'unwrapKey', + ] + PUBLIC_KEY_OPS = [ + 'verify', 'encrypt', 'wrapKey', + ] + + REQUIRED_JSON_FIELDS = [] + + def __init__(self, options=None): + self.options = options or {} + self._dict_data = {} + + @property + def tokens(self): + if not self._dict_data: + self.load_dict_key() + + rv = dict(self._dict_data) + rv['kty'] = self.kty + for k in self.ALLOWED_PARAMS: + if k not in rv and k in self.options: + rv[k] = self.options[k] + return rv + + @property + def kid(self): + 
return self.tokens.get('kid') + + def keys(self): + return self.tokens.keys() + + def __getitem__(self, item): + return self.tokens[item] + + @property + def public_only(self): + raise NotImplementedError() + + def load_raw_key(self): + raise NotImplementedError() + + def load_dict_key(self): + raise NotImplementedError() + + def check_key_op(self, operation): + """Check if the given key_op is supported by this key. + + :param operation: key operation value, such as "sign", "encrypt". + :raise: ValueError + """ + key_ops = self.tokens.get('key_ops') + if key_ops is not None and operation not in key_ops: + raise ValueError(f'Unsupported key_op "{operation}"') + + if operation in self.PRIVATE_KEY_OPS and self.public_only: + raise ValueError(f'Invalid key_op "{operation}" for public key') + + use = self.tokens.get('use') + if use: + if operation in ['sign', 'verify']: + if use != 'sig': + raise InvalidUseError() + elif operation in ['decrypt', 'encrypt', 'wrapKey', 'unwrapKey']: + if use != 'enc': + raise InvalidUseError() + + def as_dict(self, is_private=False, **params): + raise NotImplementedError() + + def as_json(self, is_private=False, **params): + """Represent this key as a JSON string.""" + obj = self.as_dict(is_private, **params) + return json_dumps(obj) + + def thumbprint(self): + """Implementation of RFC7638 JSON Web Key (JWK) Thumbprint.""" + fields = list(self.REQUIRED_JSON_FIELDS) + fields.append('kty') + fields.sort() + data = OrderedDict() + + for k in fields: + data[k] = self.tokens[k] + + json_data = json_dumps(data) + digest_data = hashlib.sha256(to_bytes(json_data)).digest() + return to_unicode(urlsafe_b64encode(digest_data)) + + @classmethod + def check_required_fields(cls, data): + for k in cls.REQUIRED_JSON_FIELDS: + if k not in data: + raise ValueError(f'Missing required field: "{k}"') + + @classmethod + def validate_raw_key(cls, key): + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/jwk.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/jwk.py new file mode 100644 index 0000000..b1578c4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/jwk.py @@ -0,0 +1,64 @@ +from authlib.common.encoding import json_loads +from .key_set import KeySet +from ._cryptography_key import load_pem_key + + +class JsonWebKey: + JWK_KEY_CLS = {} + + @classmethod + def generate_key(cls, kty, crv_or_size, options=None, is_private=False): + """Generate a Key with the given key type, curve name or bit size. + + :param kty: string of ``oct``, ``RSA``, ``EC``, ``OKP`` + :param crv_or_size: curve name or bit size + :param options: a dict of other options for Key + :param is_private: create a private key or public key + :return: Key instance + """ + key_cls = cls.JWK_KEY_CLS[kty] + return key_cls.generate_key(crv_or_size, options, is_private) + + @classmethod + def import_key(cls, raw, options=None): + """Import a Key from bytes, string, PEM or dict. + + :return: Key instance + """ + kty = None + if options is not None: + kty = options.get('kty') + + if kty is None and isinstance(raw, dict): + kty = raw.get('kty') + + if kty is None: + raw_key = load_pem_key(raw) + for _kty in cls.JWK_KEY_CLS: + key_cls = cls.JWK_KEY_CLS[_kty] + if key_cls.validate_raw_key(raw_key): + return key_cls.import_key(raw_key, options) + + key_cls = cls.JWK_KEY_CLS[kty] + return key_cls.import_key(raw, options) + + @classmethod + def import_key_set(cls, raw): + """Import KeySet from string, dict or a list of keys. 
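A small sketch of the RFC 7638 thumbprint defined above; it assumes a full authlib install where the 'oct' key class is registered with JsonWebKey:

from authlib.jose import JsonWebKey

jwk = {'kty': 'oct', 'k': 'c2VjcmV0'}
kid = JsonWebKey.import_key(jwk).thumbprint()
# SHA-256 over the sorted required members: stable across runs and machines
assert kid == JsonWebKey.import_key(jwk).thumbprint()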
+ + :return: KeySet instance + """ + raw = _transform_raw_key(raw) + if isinstance(raw, dict) and 'keys' in raw: + keys = raw.get('keys') + return KeySet([cls.import_key(k) for k in keys]) + raise ValueError('Invalid key set format') + + +def _transform_raw_key(raw): + if isinstance(raw, str) and \ + raw.startswith('{') and raw.endswith('}'): + return json_loads(raw) + elif isinstance(raw, (tuple, list)): + return {'keys': raw} + return raw diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/key_set.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/key_set.py new file mode 100644 index 0000000..3416ce9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7517/key_set.py @@ -0,0 +1,29 @@ +from authlib.common.encoding import json_dumps + + +class KeySet: + """This class represents a JSON Web Key Set.""" + + def __init__(self, keys): + self.keys = keys + + def as_dict(self, is_private=False, **params): + """Represent this key set as a dict of the JSON Web Key Set.""" + return {'keys': [k.as_dict(is_private, **params) for k in self.keys]} + + def as_json(self, is_private=False, **params): + """Represent this key set as a JSON string.""" + obj = self.as_dict(is_private, **params) + return json_dumps(obj) + + def find_by_kid(self, kid): + """Find the key that matches the given kid value. + + :param kid: A string of kid + :return: Key instance + :raise: ValueError + """ + for k in self.keys: + if k.kid == kid: + return k + raise ValueError('Invalid JSON Web Key Set') diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__init__.py new file mode 100644 index 0000000..360f6c6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__init__.py @@ -0,0 +1,35 @@ +from .oct_key import OctKey +from .rsa_key import RSAKey +from .ec_key import ECKey +from .jws_algs import JWS_ALGORITHMS +from .jwe_algs import JWE_ALG_ALGORITHMS, AESAlgorithm, ECDHESAlgorithm, u32be_len_input +from .jwe_encs import JWE_ENC_ALGORITHMS, CBCHS2EncAlgorithm +from .jwe_zips import DeflateZipAlgorithm + + +def register_jws_rfc7518(cls): + for algorithm in JWS_ALGORITHMS: + cls.register_algorithm(algorithm) + + +def register_jwe_rfc7518(cls): + for algorithm in JWE_ALG_ALGORITHMS: + cls.register_algorithm(algorithm) + + for algorithm in JWE_ENC_ALGORITHMS: + cls.register_algorithm(algorithm) + + cls.register_algorithm(DeflateZipAlgorithm()) + + +__all__ = [ + 'register_jws_rfc7518', + 'register_jwe_rfc7518', + 'OctKey', + 'RSAKey', + 'ECKey', + 'u32be_len_input', + 'AESAlgorithm', + 'ECDHESAlgorithm', + 'CBCHS2EncAlgorithm', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a91b638 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/ec_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/ec_key.cpython-312.pyc new file mode 100644 index 0000000..c7d4092 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/ec_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_algs.cpython-312.pyc
b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_algs.cpython-312.pyc new file mode 100644 index 0000000..8b748f0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_algs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_encs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_encs.cpython-312.pyc new file mode 100644 index 0000000..934f00b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_encs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_zips.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_zips.cpython-312.pyc new file mode 100644 index 0000000..371e52d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jwe_zips.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jws_algs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jws_algs.cpython-312.pyc new file mode 100644 index 0000000..9b145f2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/jws_algs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/oct_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/oct_key.cpython-312.pyc new file mode 100644 index 0000000..7eafb6c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/oct_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/rsa_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/rsa_key.cpython-312.pyc new file mode 100644 index 0000000..5d4a4a9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/rsa_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..3ada1a1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/ec_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/ec_key.py new file mode 100644 index 0000000..05f0c04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/ec_key.py @@ -0,0 +1,101 @@ +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric.ec import ( + EllipticCurvePublicKey, EllipticCurvePrivateKeyWithSerialization, + EllipticCurvePrivateNumbers, EllipticCurvePublicNumbers, + SECP256R1, SECP384R1, SECP521R1, SECP256K1, +) +from cryptography.hazmat.backends import default_backend +from authlib.common.encoding import base64_to_int, int_to_base64 +from ..rfc7517 import AsymmetricKey + + +class ECKey(AsymmetricKey): + """Key class of the ``EC`` key type.""" + + kty = 'EC' + DSS_CURVES = { + 'P-256': SECP256R1, + 'P-384': SECP384R1, + 'P-521': SECP521R1, + # https://tools.ietf.org/html/rfc8812#section-3.1 + 'secp256k1': SECP256K1, + } + CURVES_DSS = { + SECP256R1.name: 'P-256', + 
SECP384R1.name: 'P-384', + SECP521R1.name: 'P-521', + SECP256K1.name: 'secp256k1', + } + REQUIRED_JSON_FIELDS = ['crv', 'x', 'y'] + + PUBLIC_KEY_FIELDS = REQUIRED_JSON_FIELDS + PRIVATE_KEY_FIELDS = ['crv', 'd', 'x', 'y'] + + PUBLIC_KEY_CLS = EllipticCurvePublicKey + PRIVATE_KEY_CLS = EllipticCurvePrivateKeyWithSerialization + SSH_PUBLIC_PREFIX = b'ecdsa-sha2-' + + def exchange_shared_key(self, pubkey): + # # used in ECDHESAlgorithm + private_key = self.get_private_key() + if private_key: + return private_key.exchange(ec.ECDH(), pubkey) + raise ValueError('Invalid key for exchanging shared key') + + @property + def curve_key_size(self): + raw_key = self.get_private_key() + if not raw_key: + raw_key = self.public_key + return raw_key.curve.key_size + + def load_private_key(self): + curve = self.DSS_CURVES[self._dict_data['crv']]() + public_numbers = EllipticCurvePublicNumbers( + base64_to_int(self._dict_data['x']), + base64_to_int(self._dict_data['y']), + curve, + ) + private_numbers = EllipticCurvePrivateNumbers( + base64_to_int(self.tokens['d']), + public_numbers + ) + return private_numbers.private_key(default_backend()) + + def load_public_key(self): + curve = self.DSS_CURVES[self._dict_data['crv']]() + public_numbers = EllipticCurvePublicNumbers( + base64_to_int(self._dict_data['x']), + base64_to_int(self._dict_data['y']), + curve, + ) + return public_numbers.public_key(default_backend()) + + def dumps_private_key(self): + numbers = self.private_key.private_numbers() + return { + 'crv': self.CURVES_DSS[self.private_key.curve.name], + 'x': int_to_base64(numbers.public_numbers.x), + 'y': int_to_base64(numbers.public_numbers.y), + 'd': int_to_base64(numbers.private_value), + } + + def dumps_public_key(self): + numbers = self.public_key.public_numbers() + return { + 'crv': self.CURVES_DSS[numbers.curve.name], + 'x': int_to_base64(numbers.x), + 'y': int_to_base64(numbers.y) + } + + @classmethod + def generate_key(cls, crv='P-256', options=None, is_private=False) -> 'ECKey': + if crv not in cls.DSS_CURVES: + raise ValueError(f'Invalid crv value: "{crv}"') + raw_key = ec.generate_private_key( + curve=cls.DSS_CURVES[crv](), + backend=default_backend(), + ) + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_algs.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_algs.py new file mode 100644 index 0000000..b57654a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_algs.py @@ -0,0 +1,349 @@ +import os +import struct +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.keywrap import ( + aes_key_wrap, + aes_key_unwrap +) +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import GCM +from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash +from authlib.common.encoding import ( + to_bytes, to_native, + urlsafe_b64decode, + urlsafe_b64encode +) +from authlib.jose.rfc7516 import JWEAlgorithm +from .rsa_key import RSAKey +from .ec_key import ECKey +from .oct_key import OctKey + + +class DirectAlgorithm(JWEAlgorithm): + name = 'dir' + description = 'Direct use of a shared symmetric key' + + def prepare_key(self, raw_data): + return 
OctKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + return {} + + def wrap(self, enc_alg, headers, key, preset=None): + cek = key.get_op_key('encrypt') + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return {'ek': b'', 'cek': cek} + + def unwrap(self, enc_alg, ek, headers, key): + cek = key.get_op_key('decrypt') + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class RSAAlgorithm(JWEAlgorithm): + #: A key of size 2048 bits or larger MUST be used with these algorithms + #: RSA1_5, RSA-OAEP, RSA-OAEP-256 + key_size = 2048 + + def __init__(self, name, description, pad_fn): + self.name = name + self.description = description + self.padding = pad_fn + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + cek = enc_alg.generate_cek() + return {'cek': cek} + + def wrap(self, enc_alg, headers, key, preset=None): + if preset and 'cek' in preset: + cek = preset['cek'] + else: + cek = enc_alg.generate_cek() + + op_key = key.get_op_key('wrapKey') + if op_key.key_size < self.key_size: + raise ValueError('A key of size 2048 bits or larger MUST be used') + ek = op_key.encrypt(cek, self.padding) + return {'ek': ek, 'cek': cek} + + def unwrap(self, enc_alg, ek, headers, key): + # it will raise ValueError if failed + op_key = key.get_op_key('unwrapKey') + cek = op_key.decrypt(ek, self.padding) + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class AESAlgorithm(JWEAlgorithm): + def __init__(self, key_size): + self.name = f'A{key_size}KW' + self.description = f'AES Key Wrap using {key_size}-bit key' + self.key_size = key_size + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + cek = enc_alg.generate_cek() + return {'cek': cek} + + def _check_key(self, key): + if len(key) * 8 != self.key_size: + raise ValueError( + f'A key of size {self.key_size} bits is required.') + + def wrap_cek(self, cek, key): + op_key = key.get_op_key('wrapKey') + self._check_key(op_key) + ek = aes_key_wrap(op_key, cek, default_backend()) + return {'ek': ek, 'cek': cek} + + def wrap(self, enc_alg, headers, key, preset=None): + if preset and 'cek' in preset: + cek = preset['cek'] + else: + cek = enc_alg.generate_cek() + return self.wrap_cek(cek, key) + + def unwrap(self, enc_alg, ek, headers, key): + op_key = key.get_op_key('unwrapKey') + self._check_key(op_key) + cek = aes_key_unwrap(op_key, ek, default_backend()) + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class AESGCMAlgorithm(JWEAlgorithm): + EXTRA_HEADERS = frozenset(['iv', 'tag']) + + def __init__(self, key_size): + self.name = f'A{key_size}GCMKW' + self.description = f'Key wrapping with AES GCM using {key_size}-bit key' + self.key_size = key_size + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + cek = enc_alg.generate_cek() + return {'cek': cek} + + def _check_key(self, key): + if len(key) * 8 != self.key_size: + raise ValueError( + f'A key of size {self.key_size} bits is required.') + + def wrap(self, enc_alg, headers, key, preset=None): + if preset and 'cek' in preset: + cek = preset['cek'] + else: + cek = enc_alg.generate_cek() + + op_key = key.get_op_key('wrapKey') + self._check_key(op_key) + + #: https://tools.ietf.org/html/rfc7518#section-4.7.1.1 + #: The "iv" 
(initialization vector) Header Parameter value is the + #: base64url-encoded representation of the 96-bit IV value + iv_size = 96 + iv = os.urandom(iv_size // 8) + + cipher = Cipher(AES(op_key), GCM(iv), backend=default_backend()) + enc = cipher.encryptor() + ek = enc.update(cek) + enc.finalize() + + h = { + 'iv': to_native(urlsafe_b64encode(iv)), + 'tag': to_native(urlsafe_b64encode(enc.tag)) + } + return {'ek': ek, 'cek': cek, 'header': h} + + def unwrap(self, enc_alg, ek, headers, key): + op_key = key.get_op_key('unwrapKey') + self._check_key(op_key) + + iv = headers.get('iv') + if not iv: + raise ValueError('Missing "iv" in headers') + + tag = headers.get('tag') + if not tag: + raise ValueError('Missing "tag" in headers') + + iv = urlsafe_b64decode(to_bytes(iv)) + tag = urlsafe_b64decode(to_bytes(tag)) + + cipher = Cipher(AES(op_key), GCM(iv, tag), backend=default_backend()) + d = cipher.decryptor() + cek = d.update(ek) + d.finalize() + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class ECDHESAlgorithm(JWEAlgorithm): + EXTRA_HEADERS = ['epk', 'apu', 'apv'] + ALLOWED_KEY_CLS = ECKey + + # https://tools.ietf.org/html/rfc7518#section-4.6 + def __init__(self, key_size=None): + if key_size is None: + self.name = 'ECDH-ES' + self.description = 'ECDH-ES in the Direct Key Agreement mode' + else: + self.name = f'ECDH-ES+A{key_size}KW' + self.description = ( + 'ECDH-ES using Concat KDF and CEK wrapped ' + 'with A{}KW').format(key_size) + self.key_size = key_size + self.aeskw = AESAlgorithm(key_size) + + def prepare_key(self, raw_data): + if isinstance(raw_data, self.ALLOWED_KEY_CLS): + return raw_data + return ECKey.import_key(raw_data) + + def generate_preset(self, enc_alg, key): + epk = self._generate_ephemeral_key(key) + h = self._prepare_headers(epk) + preset = {'epk': epk, 'header': h} + if self.key_size is not None: + cek = enc_alg.generate_cek() + preset['cek'] = cek + return preset + + def compute_fixed_info(self, headers, bit_size): + # AlgorithmID + if self.key_size is None: + alg_id = u32be_len_input(headers['enc']) + else: + alg_id = u32be_len_input(headers['alg']) + + # PartyUInfo + apu_info = u32be_len_input(headers.get('apu'), True) + + # PartyVInfo + apv_info = u32be_len_input(headers.get('apv'), True) + + # SuppPubInfo + pub_info = struct.pack('>I', bit_size) + + return alg_id + apu_info + apv_info + pub_info + + def compute_derived_key(self, shared_key, fixed_info, bit_size): + ckdf = ConcatKDFHash( + algorithm=hashes.SHA256(), + length=bit_size // 8, + otherinfo=fixed_info, + backend=default_backend() + ) + return ckdf.derive(shared_key) + + def deliver(self, key, pubkey, headers, bit_size): + shared_key = key.exchange_shared_key(pubkey) + fixed_info = self.compute_fixed_info(headers, bit_size) + return self.compute_derived_key(shared_key, fixed_info, bit_size) + + def _generate_ephemeral_key(self, key): + return key.generate_key(key['crv'], is_private=True) + + def _prepare_headers(self, epk): + # REQUIRED_JSON_FIELDS contains only public fields + pub_epk = {k: epk[k] for k in epk.REQUIRED_JSON_FIELDS} + pub_epk['kty'] = epk.kty + return {'epk': pub_epk} + + def wrap(self, enc_alg, headers, key, preset=None): + if self.key_size is None: + bit_size = enc_alg.CEK_SIZE + else: + bit_size = self.key_size + + if preset and 'epk' in preset: + epk = preset['epk'] + h = {} + else: + epk = self._generate_ephemeral_key(key) + h = self._prepare_headers(epk) + + public_key = key.get_op_key('wrapKey') + dk = self.deliver(epk, 
public_key, headers, bit_size) + + if self.key_size is None: + return {'ek': b'', 'cek': dk, 'header': h} + + if preset and 'cek' in preset: + preset_for_kw = {'cek': preset['cek']} + else: + preset_for_kw = None + + kek = self.aeskw.prepare_key(dk) + rv = self.aeskw.wrap(enc_alg, headers, kek, preset_for_kw) + rv['header'] = h + return rv + + def unwrap(self, enc_alg, ek, headers, key): + if 'epk' not in headers: + raise ValueError('Missing "epk" in headers') + + if self.key_size is None: + bit_size = enc_alg.CEK_SIZE + else: + bit_size = self.key_size + + epk = key.import_key(headers['epk']) + public_key = epk.get_op_key('wrapKey') + dk = self.deliver(key, public_key, headers, bit_size) + + if self.key_size is None: + return dk + + kek = self.aeskw.prepare_key(dk) + return self.aeskw.unwrap(enc_alg, ek, headers, kek) + + +def u32be_len_input(s, base64=False): + if not s: + return b'\x00\x00\x00\x00' + if base64: + s = urlsafe_b64decode(to_bytes(s)) + else: + s = to_bytes(s) + return struct.pack('>I', len(s)) + s + + +JWE_ALG_ALGORITHMS = [ + DirectAlgorithm(), # dir + RSAAlgorithm('RSA1_5', 'RSAES-PKCS1-v1_5', padding.PKCS1v15()), + RSAAlgorithm( + 'RSA-OAEP', 'RSAES OAEP using default parameters', + padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None)), + RSAAlgorithm( + 'RSA-OAEP-256', 'RSAES OAEP using SHA-256 and MGF1 with SHA-256', + padding.OAEP(padding.MGF1(hashes.SHA256()), hashes.SHA256(), None)), + + AESAlgorithm(128), # A128KW + AESAlgorithm(192), # A192KW + AESAlgorithm(256), # A256KW + AESGCMAlgorithm(128), # A128GCMKW + AESGCMAlgorithm(192), # A192GCMKW + AESGCMAlgorithm(256), # A256GCMKW + ECDHESAlgorithm(None), # ECDH-ES + ECDHESAlgorithm(128), # ECDH-ES+A128KW + ECDHESAlgorithm(192), # ECDH-ES+A192KW + ECDHESAlgorithm(256), # ECDH-ES+A256KW +] + +# 'PBES2-HS256+A128KW': '', +# 'PBES2-HS384+A192KW': '', +# 'PBES2-HS512+A256KW': '',
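On the ConcatKDF fixed info assembled by compute_fixed_info() above: u32be_len_input() produces the 32-bit big-endian length prefix that RFC 7518 section 4.6.2 requires for the AlgorithmID, PartyUInfo and PartyVInfo fields. A small sketch, assuming authlib is installed:

from authlib.jose.rfc7518 import u32be_len_input

assert u32be_len_input('A128GCM') == b'\x00\x00\x00\x07A128GCM'
assert u32be_len_input(None) == b'\x00\x00\x00\x00'   # empty field: zero length, no data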
diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_encs.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_encs.py new file mode 100644 index 0000000..f951d10 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_encs.py @@ -0,0 +1,144 @@ +""" + authlib.jose.rfc7518 + ~~~~~~~~~~~~~~~~~~~~ + + Cryptographic Algorithms for Content + Encryption per `Section 5`_. + + .. _`Section 5`: https://tools.ietf.org/html/rfc7518#section-5 +""" +import hmac +import hashlib +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import GCM, CBC +from cryptography.hazmat.primitives.padding import PKCS7 +from cryptography.exceptions import InvalidTag +from ..rfc7516 import JWEEncAlgorithm +from .util import encode_int + + +class CBCHS2EncAlgorithm(JWEEncAlgorithm): + # The IV used is a 128-bit value generated randomly or + # pseudo-randomly for use in the cipher. + IV_SIZE = 128 + + def __init__(self, key_size, hash_type): + self.name = f'A{key_size}CBC-HS{hash_type}' + tpl = 'AES_{}_CBC_HMAC_SHA_{} authenticated encryption algorithm' + self.description = tpl.format(key_size, hash_type) + + # bit length + self.key_size = key_size + # byte length + self.key_len = key_size // 8 + + self.CEK_SIZE = key_size * 2 + self.hash_alg = getattr(hashlib, f'sha{hash_type}') + + def _hmac(self, ciphertext, aad, iv, key): + al = encode_int(len(aad) * 8, 64) + msg = aad + iv + ciphertext + al + d = hmac.new(key, msg, self.hash_alg).digest() + return d[:self.key_len] + + def encrypt(self, msg, aad, iv, key): + """Content encryption with AES_CBC_HMAC_SHA2. + + :param msg: text to be encrypted, in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: content encryption key (CEK) in bytes + :return: (ciphertext, tag) + """ + self.check_iv(iv) + hkey = key[:self.key_len] + ekey = key[self.key_len:] + + pad = PKCS7(AES.block_size).padder() + padded_data = pad.update(msg) + pad.finalize() + + cipher = Cipher(AES(ekey), CBC(iv), backend=default_backend()) + enc = cipher.encryptor() + ciphertext = enc.update(padded_data) + enc.finalize() + tag = self._hmac(ciphertext, aad, iv, hkey) + return ciphertext, tag + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Content decryption with AES_CBC_HMAC_SHA2. + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: content encryption key (CEK) in bytes + :return: message + """ + self.check_iv(iv) + hkey = key[:self.key_len] + dkey = key[self.key_len:] + + _tag = self._hmac(ciphertext, aad, iv, hkey) + if not hmac.compare_digest(_tag, tag): + raise InvalidTag() + + cipher = Cipher(AES(dkey), CBC(iv), backend=default_backend()) + d = cipher.decryptor() + data = d.update(ciphertext) + d.finalize() + unpad = PKCS7(AES.block_size).unpadder() + return unpad.update(data) + unpad.finalize()
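The split-key construction above is worth seeing end to end: the CEK is twice the AES key size, its first half keys the HMAC, its second half keys AES-CBC, and the tag is the HMAC output truncated to key_len bytes. A runnable sketch, assuming authlib is installed:

from authlib.jose.rfc7518 import CBCHS2EncAlgorithm

enc = CBCHS2EncAlgorithm(128, 256)           # A128CBC-HS256
cek, iv = enc.generate_cek(), enc.generate_iv()
ciphertext, tag = enc.encrypt(b'attack at dawn', b'aad', iv, cek)
assert enc.decrypt(ciphertext, b'aad', iv, tag, cek) == b'attack at dawn'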
+ + +class GCMEncAlgorithm(JWEEncAlgorithm): + # Use of an IV of size 96 bits is REQUIRED with this algorithm. + # https://tools.ietf.org/html/rfc7518#section-5.3 + IV_SIZE = 96 + + def __init__(self, key_size): + self.name = f'A{key_size}GCM' + self.description = f'AES GCM using {key_size}-bit key' + self.key_size = key_size + self.CEK_SIZE = key_size + + def encrypt(self, msg, aad, iv, key): + """Content encryption with AES GCM. + + :param msg: text to be encrypted, in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: content encryption key (CEK) in bytes + :return: (ciphertext, tag) + """ + self.check_iv(iv) + cipher = Cipher(AES(key), GCM(iv), backend=default_backend()) + enc = cipher.encryptor() + enc.authenticate_additional_data(aad) + ciphertext = enc.update(msg) + enc.finalize() + return ciphertext, enc.tag + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Content decryption with AES GCM. + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: content encryption key (CEK) in bytes + :return: message + """ + self.check_iv(iv) + cipher = Cipher(AES(key), GCM(iv, tag), backend=default_backend()) + d = cipher.decryptor() + d.authenticate_additional_data(aad) + return d.update(ciphertext) + d.finalize() + + +JWE_ENC_ALGORITHMS = [ + CBCHS2EncAlgorithm(128, 256), # A128CBC-HS256 + CBCHS2EncAlgorithm(192, 384), # A192CBC-HS384 + CBCHS2EncAlgorithm(256, 512), # A256CBC-HS512 + GCMEncAlgorithm(128), # A128GCM + GCMEncAlgorithm(192), # A192GCM + GCMEncAlgorithm(256), # A256GCM +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_zips.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_zips.py new file mode 100644 index 0000000..2396861 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jwe_zips.py @@ -0,0 +1,21 @@ +import zlib +from ..rfc7516 import JWEZipAlgorithm, JsonWebEncryption + + +class DeflateZipAlgorithm(JWEZipAlgorithm): + name = 'DEF' + description = 'DEFLATE' + + def compress(self, s): + """Compress bytes data with DEFLATE algorithm.""" + data = zlib.compress(s) + # drop the 2-byte zlib header and the 4-byte checksum tail + return data[2:-4] + + def decompress(self, s): + """Decompress DEFLATE bytes data.""" + return zlib.decompress(s, -zlib.MAX_WBITS) + + +def register_jwe_rfc7518(): + JsonWebEncryption.register_algorithm(DeflateZipAlgorithm()) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jws_algs.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jws_algs.py new file mode 100644 index 0000000..2c02840 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/jws_algs.py @@ -0,0 +1,215 @@ +""" + authlib.jose.rfc7518 + ~~~~~~~~~~~~~~~~~~~~ + + "alg" (Algorithm) Header Parameter Values for JWS per `Section 3`_. + + ..
_`Section 3`: https://tools.ietf.org/html/rfc7518#section-3 +""" + +import hmac +import hashlib +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.utils import ( + decode_dss_signature, encode_dss_signature +) +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.exceptions import InvalidSignature +from ..rfc7515 import JWSAlgorithm +from .oct_key import OctKey +from .rsa_key import RSAKey +from .ec_key import ECKey +from .util import encode_int, decode_int + + +class NoneAlgorithm(JWSAlgorithm): + name = 'none' + description = 'No digital signature or MAC performed' + + def prepare_key(self, raw_data): + return None + + def sign(self, msg, key): + return b'' + + def verify(self, msg, sig, key): + return False + + +class HMACAlgorithm(JWSAlgorithm): + """HMAC using SHA algorithms for JWS. Available algorithms: + + - HS256: HMAC using SHA-256 + - HS384: HMAC using SHA-384 + - HS512: HMAC using SHA-512 + """ + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + def __init__(self, sha_type): + self.name = f'HS{sha_type}' + self.description = f'HMAC using SHA-{sha_type}' + self.hash_alg = getattr(self, f'SHA{sha_type}') + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def sign(self, msg, key): + # it is faster than the one in cryptography + op_key = key.get_op_key('sign') + return hmac.new(op_key, msg, self.hash_alg).digest() + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + v_sig = hmac.new(op_key, msg, self.hash_alg).digest() + return hmac.compare_digest(sig, v_sig) + + +class RSAAlgorithm(JWSAlgorithm): + """RSA using SHA algorithms for JWS. Available algorithms: + + - RS256: RSASSA-PKCS1-v1_5 using SHA-256 + - RS384: RSASSA-PKCS1-v1_5 using SHA-384 + - RS512: RSASSA-PKCS1-v1_5 using SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, sha_type): + self.name = f'RS{sha_type}' + self.description = f'RSASSA-PKCS1-v1_5 using SHA-{sha_type}' + self.hash_alg = getattr(self, f'SHA{sha_type}') + self.padding = padding.PKCS1v15() + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign(msg, self.padding, self.hash_alg()) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify(sig, msg, self.padding, self.hash_alg()) + return True + except InvalidSignature: + return False + + +class ECAlgorithm(JWSAlgorithm): + """ECDSA using SHA algorithms for JWS. 
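A tiny usage sketch for the HMAC family above; it assumes authlib is installed and the secret is illustrative. sign() and verify() operate on the raw JWS signing input, and verification uses a constant-time comparison:

from authlib.jose.rfc7518.jws_algs import HMACAlgorithm

hs256 = HMACAlgorithm(256)
key = hs256.prepare_key(b'a-sufficiently-long-shared-secret')
sig = hs256.sign(b'signing input', key)
assert hs256.verify(b'signing input', sig, key)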
Available algorithms: + + - ES256: ECDSA using P-256 and SHA-256 + - ES384: ECDSA using P-384 and SHA-384 + - ES512: ECDSA using P-521 and SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, name, curve, sha_type): + self.name = name + self.curve = curve + self.description = f'ECDSA using {self.curve} and SHA-{sha_type}' + self.hash_alg = getattr(self, f'SHA{sha_type}') + + def prepare_key(self, raw_data): + key = ECKey.import_key(raw_data) + if key['crv'] != self.curve: + raise ValueError(f'Key for "{self.name}" not supported, only "{self.curve}" allowed') + return key + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + der_sig = op_key.sign(msg, ECDSA(self.hash_alg())) + r, s = decode_dss_signature(der_sig) + size = key.curve_key_size + return encode_int(r, size) + encode_int(s, size) + + def verify(self, msg, sig, key): + key_size = key.curve_key_size + length = (key_size + 7) // 8 + + if len(sig) != 2 * length: + return False + + r = decode_int(sig[:length]) + s = decode_int(sig[length:]) + der_sig = encode_dss_signature(r, s) + + try: + op_key = key.get_op_key('verify') + op_key.verify(der_sig, msg, ECDSA(self.hash_alg())) + return True + except InvalidSignature: + return False + + +class RSAPSSAlgorithm(JWSAlgorithm): + """RSASSA-PSS using SHA algorithms for JWS. Available algorithms: + + - PS256: RSASSA-PSS using SHA-256 and MGF1 with SHA-256 + - PS384: RSASSA-PSS using SHA-384 and MGF1 with SHA-384 + - PS512: RSASSA-PSS using SHA-512 and MGF1 with SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, sha_type): + self.name = f'PS{sha_type}' + tpl = 'RSASSA-PSS using SHA-{} and MGF1 with SHA-{}' + self.description = tpl.format(sha_type, sha_type) + self.hash_alg = getattr(self, f'SHA{sha_type}') + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign( + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify( + sig, + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + return True + except InvalidSignature: + return False + + +JWS_ALGORITHMS = [ + NoneAlgorithm(), # none + HMACAlgorithm(256), # HS256 + HMACAlgorithm(384), # HS384 + HMACAlgorithm(512), # HS512 + RSAAlgorithm(256), # RS256 + RSAAlgorithm(384), # RS384 + RSAAlgorithm(512), # RS512 + ECAlgorithm('ES256', 'P-256', 256), + ECAlgorithm('ES384', 'P-384', 384), + ECAlgorithm('ES512', 'P-521', 512), + ECAlgorithm('ES256K', 'secp256k1', 256), # defined in RFC8812 + RSAPSSAlgorithm(256), # PS256 + RSAPSSAlgorithm(384), # PS384 + RSAPSSAlgorithm(512), # PS512 +] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/oct_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/oct_key.py new file mode 100644 index 0000000..44e1f72 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/oct_key.py @@ -0,0 +1,95 @@ +from authlib.common.encoding import ( + to_bytes, to_unicode, + urlsafe_b64encode, urlsafe_b64decode, +) +from authlib.common.security import generate_token +from ..rfc7517 import Key + + +POSSIBLE_UNSAFE_KEYS = ( + b"-----BEGIN ", + b"---- BEGIN ", + b"ssh-rsa ", + b"ssh-dss ", + b"ssh-ed25519 ", + b"ecdsa-sha2-", 
+) + + +class OctKey(Key): + """Key class of the ``oct`` key type.""" + + kty = 'oct' + REQUIRED_JSON_FIELDS = ['k'] + + def __init__(self, raw_key=None, options=None): + super().__init__(options) + self.raw_key = raw_key + + @property + def public_only(self): + return False + + def get_op_key(self, operation): + """Get the raw key for the given key_op. This method will also + check if the given key_op is supported by this key. + + :param operation: key operation value, such as "sign", "encrypt". + :return: raw key + """ + self.check_key_op(operation) + if not self.raw_key: + self.load_raw_key() + return self.raw_key + + def load_raw_key(self): + self.raw_key = urlsafe_b64decode(to_bytes(self.tokens['k'])) + + def load_dict_key(self): + k = to_unicode(urlsafe_b64encode(self.raw_key)) + self._dict_data = {'kty': self.kty, 'k': k} + + def as_dict(self, is_private=False, **params): + tokens = self.tokens + if 'kid' not in tokens: + tokens['kid'] = self.thumbprint() + + tokens.update(params) + return tokens + + @classmethod + def validate_raw_key(cls, key): + return isinstance(key, bytes) + + @classmethod + def import_key(cls, raw, options=None): + """Import a key from bytes, string, or dict data.""" + if isinstance(raw, cls): + if options is not None: + raw.options.update(options) + return raw + + if isinstance(raw, dict): + cls.check_required_fields(raw) + key = cls(options=options) + key._dict_data = raw + else: + raw_key = to_bytes(raw) + + # security check + if raw_key.startswith(POSSIBLE_UNSAFE_KEYS): + raise ValueError("This key may not be safe to import") + + key = cls(raw_key=raw_key, options=options) + return key + + @classmethod + def generate_key(cls, key_size=256, options=None, is_private=True): + """Generate a ``OctKey`` with the given bit size.""" + if not is_private: + raise ValueError('oct key can not be generated as public') + + if key_size % 8 != 0: + raise ValueError('Invalid bit size for oct key') + + return cls.import_key(generate_token(key_size // 8), options) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/rsa_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/rsa_key.py new file mode 100644 index 0000000..53bd995 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/rsa_key.py @@ -0,0 +1,123 @@ +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPublicKey, RSAPrivateKeyWithSerialization, + RSAPrivateNumbers, RSAPublicNumbers, + rsa_recover_prime_factors, rsa_crt_dmp1, rsa_crt_dmq1, rsa_crt_iqmp +) +from cryptography.hazmat.backends import default_backend +from authlib.common.encoding import base64_to_int, int_to_base64 +from ..rfc7517 import AsymmetricKey + + +class RSAKey(AsymmetricKey): + """Key class of the ``RSA`` key type.""" + + kty = 'RSA' + PUBLIC_KEY_CLS = RSAPublicKey + PRIVATE_KEY_CLS = RSAPrivateKeyWithSerialization + + PUBLIC_KEY_FIELDS = ['e', 'n'] + PRIVATE_KEY_FIELDS = ['d', 'dp', 'dq', 'e', 'n', 'p', 'q', 'qi'] + REQUIRED_JSON_FIELDS = ['e', 'n'] + SSH_PUBLIC_PREFIX = b'ssh-rsa' + + def dumps_private_key(self): + numbers = self.private_key.private_numbers() + return { + 'n': int_to_base64(numbers.public_numbers.n), + 'e': int_to_base64(numbers.public_numbers.e), + 'd': int_to_base64(numbers.d), + 'p': int_to_base64(numbers.p), + 'q': int_to_base64(numbers.q), + 'dp': int_to_base64(numbers.dmp1), + 'dq': int_to_base64(numbers.dmq1), + 'qi': int_to_base64(numbers.iqmp) + } + + def dumps_public_key(self): + numbers = 
self.public_key.public_numbers() + return { + 'n': int_to_base64(numbers.n), + 'e': int_to_base64(numbers.e) + } + + def load_private_key(self): + obj = self._dict_data + + if 'oth' in obj: # pragma: no cover + # https://tools.ietf.org/html/rfc7518#section-6.3.2.7 + raise ValueError('"oth" is not supported yet') + + public_numbers = RSAPublicNumbers( + base64_to_int(obj['e']), base64_to_int(obj['n'])) + + if has_all_prime_factors(obj): + numbers = RSAPrivateNumbers( + d=base64_to_int(obj['d']), + p=base64_to_int(obj['p']), + q=base64_to_int(obj['q']), + dmp1=base64_to_int(obj['dp']), + dmq1=base64_to_int(obj['dq']), + iqmp=base64_to_int(obj['qi']), + public_numbers=public_numbers) + else: + d = base64_to_int(obj['d']) + p, q = rsa_recover_prime_factors( + public_numbers.n, d, public_numbers.e) + numbers = RSAPrivateNumbers( + d=d, + p=p, + q=q, + dmp1=rsa_crt_dmp1(d, p), + dmq1=rsa_crt_dmq1(d, q), + iqmp=rsa_crt_iqmp(p, q), + public_numbers=public_numbers) + + return numbers.private_key(default_backend()) + + def load_public_key(self): + numbers = RSAPublicNumbers( + base64_to_int(self._dict_data['e']), + base64_to_int(self._dict_data['n']) + ) + return numbers.public_key(default_backend()) + + @classmethod + def generate_key(cls, key_size=2048, options=None, is_private=False) -> 'RSAKey': + if key_size < 512: + raise ValueError('key_size must not be less than 512') + if key_size % 8 != 0: + raise ValueError('Invalid key_size for RSAKey') + raw_key = rsa.generate_private_key( + public_exponent=65537, + key_size=key_size, + backend=default_backend(), + ) + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) + + @classmethod + def import_dict_key(cls, raw, options=None): + cls.check_required_fields(raw) + key = cls(options=options) + key._dict_data = raw + if 'd' in raw and not has_all_prime_factors(raw): + # reload dict key + key.load_raw_key() + key.load_dict_key() + return key + + +def has_all_prime_factors(obj): + props = ['p', 'q', 'dp', 'dq', 'qi'] + props_found = [prop in obj for prop in props] + if all(props_found): + return True + + if any(props_found): + raise ValueError( + 'RSA key must include all parameters ' + 'if any are present besides d') + + return False diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/util.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/util.py new file mode 100644 index 0000000..d2d13ec --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7518/util.py @@ -0,0 +1,12 @@ +import binascii + + +def encode_int(num, bits): + length = ((bits + 7) // 8) * 2 + padded_hex = '%0*x' % (length, num) + big_endian = binascii.a2b_hex(padded_hex.encode('ascii')) + return big_endian + + +def decode_int(b): + return int(binascii.b2a_hex(b), 16) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__init__.py new file mode 100644 index 0000000..5eea5b7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__init__.py @@ -0,0 +1,15 @@ +""" + authlib.jose.rfc7519 + ~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Token (JWT). 
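The encode_int()/decode_int() helpers in util.py above produce the fixed-width big-endian integers used for the raw r||s ECDSA signature format in jws_algs.py. A small sketch, assuming authlib is installed:

from authlib.jose.rfc7518.util import encode_int, decode_int

assert encode_int(1, 256) == b'\x00' * 31 + b'\x01'   # always ceil(bits/8) bytes wide
assert decode_int(encode_int(1, 256)) == 1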
+ + https://tools.ietf.org/html/rfc7519 +""" + +from .jwt import JsonWebToken +from .claims import BaseClaims, JWTClaims + + +__all__ = ['JsonWebToken', 'BaseClaims', 'JWTClaims'] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8f94e0f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/claims.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/claims.cpython-312.pyc new file mode 100644 index 0000000..3f0c2fd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/claims.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/jwt.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/jwt.cpython-312.pyc new file mode 100644 index 0000000..2176311 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/__pycache__/jwt.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/claims.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/claims.py new file mode 100644 index 0000000..6a9877b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/claims.py @@ -0,0 +1,227 @@ +import time +from authlib.jose.errors import ( + MissingClaimError, + InvalidClaimError, + ExpiredTokenError, + InvalidTokenError, +) + + +class BaseClaims(dict): + """Payload claims for JWT, which contains a validate interface. + + :param payload: the payload dict of JWT + :param header: the header dict of JWT + :param options: validate options + :param params: other params + + An example of the ``options`` parameter; the format is inspired by + `OpenID Connect Claims`_:: + + { + "iss": { + "essential": True, + "values": ["https://example.com", "https://example.org"] + }, + "sub": { + "essential": True, + "value": "248289761001" + }, + "jti": { + "validate": validate_jti + } + } + + ..
_`OpenID Connect Claims`: + http://openid.net/specs/openid-connect-core-1_0.html#IndividualClaimsRequests + """ + REGISTERED_CLAIMS = [] + + def __init__(self, payload, header, options=None, params=None): + super().__init__(payload) + self.header = header + self.options = options or {} + self.params = params or {} + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTERED_CLAIMS: + return self.get(key) + raise error + + def _validate_essential_claims(self): + for k in self.options: + if self.options[k].get('essential'): + if k not in self: + raise MissingClaimError(k) + elif not self.get(k): + raise InvalidClaimError(k) + + def _validate_claim_value(self, claim_name): + option = self.options.get(claim_name) + if not option: + return + + value = self.get(claim_name) + option_value = option.get('value') + if option_value and value != option_value: + raise InvalidClaimError(claim_name) + + option_values = option.get('values') + if option_values and value not in option_values: + raise InvalidClaimError(claim_name) + + validate = option.get('validate') + if validate and not validate(self, value): + raise InvalidClaimError(claim_name) + + def get_registered_claims(self): + rv = {} + for k in self.REGISTERED_CLAIMS: + if k in self: + rv[k] = self[k] + return rv + + +class JWTClaims(BaseClaims): + REGISTERED_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'nbf', 'iat', 'jti'] + + def validate(self, now=None, leeway=0): + """Validate everything in claims payload.""" + self._validate_essential_claims() + + if now is None: + now = int(time.time()) + + self.validate_iss() + self.validate_sub() + self.validate_aud() + self.validate_exp(now, leeway) + self.validate_nbf(now, leeway) + self.validate_iat(now, leeway) + self.validate_jti() + + # Validate custom claims + for key in self.options.keys(): + if key not in self.REGISTERED_CLAIMS: + self._validate_claim_value(key) + + def validate_iss(self): + """The "iss" (issuer) claim identifies the principal that issued the + JWT. The processing of this claim is generally application specific. + The "iss" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + """ + self._validate_claim_value('iss') + + def validate_sub(self): + """The "sub" (subject) claim identifies the principal that is the + subject of the JWT. The claims in a JWT are normally statements + about the subject. The subject value MUST either be scoped to be + locally unique in the context of the issuer or be globally unique. + The processing of this claim is generally application specific. The + "sub" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + """ + self._validate_claim_value('sub') + + def validate_aud(self): + """The "aud" (audience) claim identifies the recipients that the JWT is + intended for. Each principal intended to process the JWT MUST + identify itself with a value in the audience claim. If the principal + processing the claim does not identify itself with a value in the + "aud" claim when this claim is present, then the JWT MUST be + rejected. In the general case, the "aud" value is an array of case- + sensitive strings, each containing a StringOrURI value. In the + special case when the JWT has one audience, the "aud" value MAY be a + single case-sensitive string containing a StringOrURI value. The + interpretation of audience values is generally application specific. + Use of this claim is OPTIONAL. 
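To make the ``options`` contract above concrete, a small sketch; the issuer values and the ``validate_jti`` callback are made up::

    from authlib.jose.rfc7519.claims import JWTClaims

    def validate_jti(claims, value):
        return bool(value)  # e.g. check a replay cache here

    claims = JWTClaims(
        {'iss': 'https://example.com', 'jti': 'abc'},
        {'alg': 'HS256'},
        options={
            'iss': {'essential': True,
                    'values': ['https://example.com', 'https://example.org']},
            'jti': {'validate': validate_jti},
        },
    )
    claims.validate()  # raises MissingClaimError/InvalidClaimError on failure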
+ """ + aud_option = self.options.get('aud') + aud = self.get('aud') + if not aud_option or not aud: + return + + aud_values = aud_option.get('values') + if not aud_values: + aud_value = aud_option.get('value') + if aud_value: + aud_values = [aud_value] + + if not aud_values: + return + + if isinstance(self['aud'], list): + aud_list = self['aud'] + else: + aud_list = [self['aud']] + + if not any([v in aud_list for v in aud_values]): + raise InvalidClaimError('aud') + + def validate_exp(self, now, leeway): + """The "exp" (expiration time) claim identifies the expiration time on + or after which the JWT MUST NOT be accepted for processing. The + processing of the "exp" claim requires that the current date/time + MUST be before the expiration date/time listed in the "exp" claim. + Implementers MAY provide for some small leeway, usually no more than + a few minutes, to account for clock skew. Its value MUST be a number + containing a NumericDate value. Use of this claim is OPTIONAL. + """ + if 'exp' in self: + exp = self['exp'] + if not _validate_numeric_time(exp): + raise InvalidClaimError('exp') + if exp < (now - leeway): + raise ExpiredTokenError() + + def validate_nbf(self, now, leeway): + """The "nbf" (not before) claim identifies the time before which the JWT + MUST NOT be accepted for processing. The processing of the "nbf" + claim requires that the current date/time MUST be after or equal to + the not-before date/time listed in the "nbf" claim. Implementers MAY + provide for some small leeway, usually no more than a few minutes, to + account for clock skew. Its value MUST be a number containing a + NumericDate value. Use of this claim is OPTIONAL. + """ + if 'nbf' in self: + nbf = self['nbf'] + if not _validate_numeric_time(nbf): + raise InvalidClaimError('nbf') + if nbf > (now + leeway): + raise InvalidTokenError() + + def validate_iat(self, now, leeway): + """The "iat" (issued at) claim identifies the time at which the JWT was + issued. This claim can be used to determine the age of the JWT. + Implementers MAY provide for some small leeway, usually no more + than a few minutes, to account for clock skew. Its value MUST be a + number containing a NumericDate value. Use of this claim is OPTIONAL. + """ + if 'iat' in self: + iat = self['iat'] + if not _validate_numeric_time(iat): + raise InvalidClaimError('iat') + if iat > (now + leeway): + raise InvalidTokenError( + description='The token is not valid as it was issued in the future' + ) + + def validate_jti(self): + """The "jti" (JWT ID) claim provides a unique identifier for the JWT. + The identifier value MUST be assigned in a manner that ensures that + there is a negligible probability that the same value will be + accidentally assigned to a different data object; if the application + uses multiple issuers, collisions MUST be prevented among values + produced by different issuers as well. The "jti" claim can be used + to prevent the JWT from being replayed. The "jti" value is a case- + sensitive string. Use of this claim is OPTIONAL. 
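The effect of ``leeway`` in the time-based checks above, with made-up timestamps::

    import time
    from authlib.jose.errors import ExpiredTokenError
    from authlib.jose.rfc7519.claims import JWTClaims

    now = int(time.time())
    claims = JWTClaims({'exp': now - 10}, {'alg': 'HS256'})
    try:
        claims.validate(now=now)            # exp < now, so the token is expired
    except ExpiredTokenError:
        pass
    claims.validate(now=now, leeway=30)     # 30s of clock skew is tolerated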
+ """ + self._validate_claim_value('jti') + + +def _validate_numeric_time(s): + return isinstance(s, (int, float)) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/jwt.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/jwt.py new file mode 100644 index 0000000..ba27998 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc7519/jwt.py @@ -0,0 +1,183 @@ +import re +import random +import datetime +import calendar +from authlib.common.encoding import ( + to_bytes, to_unicode, + json_loads, json_dumps, +) +from .claims import JWTClaims +from ..errors import DecodeError, InsecureClaimError +from ..rfc7515 import JsonWebSignature +from ..rfc7516 import JsonWebEncryption +from ..rfc7517 import KeySet, Key + + +class JsonWebToken: + SENSITIVE_NAMES = ('password', 'token', 'secret', 'secret_key') + # Thanks to sentry SensitiveDataFilter + SENSITIVE_VALUES = re.compile(r'|'.join([ + # http://www.richardsramblings.com/regex/credit-card-numbers/ + r'\b(?:3[47]\d|(?:4\d|5[1-5]|65)\d{2}|6011)\d{12}\b', + # various private keys + r'-----BEGIN[A-Z ]+PRIVATE KEY-----.+-----END[A-Z ]+PRIVATE KEY-----', + # social security numbers (US) + r'^\b(?!(000|666|9))\d{3}-(?!00)\d{2}-(?!0000)\d{4}\b', + ]), re.DOTALL) + + def __init__(self, algorithms, private_headers=None): + self._jws = JsonWebSignature(algorithms, private_headers=private_headers) + self._jwe = JsonWebEncryption(algorithms, private_headers=private_headers) + + def check_sensitive_data(self, payload): + """Check if payload contains sensitive information.""" + for k in payload: + # check claims key name + if k in self.SENSITIVE_NAMES: + raise InsecureClaimError(k) + + # check claims values + v = payload[k] + if isinstance(v, str) and self.SENSITIVE_VALUES.search(v): + raise InsecureClaimError(k) + + def encode(self, header, payload, key, check=True): + """Encode a JWT with the given header, payload and key. + + :param header: A dict of JWS header + :param payload: A dict to be encoded + :param key: key used to create the signature + :param check: whether to check for sensitive data in the payload + :return: bytes + """ + header.setdefault('typ', 'JWT') + + for k in ['exp', 'iat', 'nbf']: + # convert datetime into timestamp + claim = payload.get(k) + if isinstance(claim, datetime.datetime): + payload[k] = calendar.timegm(claim.utctimetuple()) + + if check: + self.check_sensitive_data(payload) + + key = find_encode_key(key, header) + text = to_bytes(json_dumps(payload)) + if 'enc' in header: + return self._jwe.serialize_compact(header, text, key) + else: + return self._jws.serialize_compact(header, text, key) + + def decode(self, s, key, claims_cls=None, + claims_options=None, claims_params=None): + """Decode the JWT with the given key. This is similar to + :meth:`verify`, except that it will raise BadSignatureError when + signature doesn't match.
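The ``encode`` path above in action; the algorithm and secret are arbitrary choices for this sketch::

    from authlib.jose import JsonWebToken
    from authlib.jose.errors import InsecureClaimError

    jwt = JsonWebToken(['HS256'])
    token = jwt.encode({'alg': 'HS256'}, {'iss': 'https://example.com'}, 'a-secret')

    try:
        jwt.encode({'alg': 'HS256'}, {'password': 'hunter2'}, 'a-secret')
    except InsecureClaimError:
        pass  # 'password' is in SENSITIVE_NAMES, so check_sensitive_data rejects it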
+ + :param s: text of JWT + :param key: key used to verify the signature + :param claims_cls: class to be used for JWT claims + :param claims_options: `options` parameters for claims_cls + :param claims_params: `params` parameters for claims_cls + :return: claims_cls instance + :raise: BadSignatureError + """ + if claims_cls is None: + claims_cls = JWTClaims + + if callable(key): + load_key = key + else: + load_key = create_load_key(prepare_raw_key(key)) + + s = to_bytes(s) + dot_count = s.count(b'.') + if dot_count == 2: + data = self._jws.deserialize_compact(s, load_key, decode_payload) + elif dot_count == 4: + data = self._jwe.deserialize_compact(s, load_key, decode_payload) + else: + raise DecodeError('Invalid input segments length') + return claims_cls( + data['payload'], data['header'], + options=claims_options, + params=claims_params, + ) + + +def decode_payload(bytes_payload): + try: + payload = json_loads(to_unicode(bytes_payload)) + except ValueError: + raise DecodeError('Invalid payload value') + if not isinstance(payload, dict): + raise DecodeError('Invalid payload type') + return payload + + +def prepare_raw_key(raw): + if isinstance(raw, KeySet): + return raw + + if isinstance(raw, str) and \ + raw.startswith('{') and raw.endswith('}'): + raw = json_loads(raw) + elif isinstance(raw, (tuple, list)): + raw = {'keys': raw} + return raw + + +def find_encode_key(key, header): + if isinstance(key, KeySet): + kid = header.get('kid') + if kid: + return key.find_by_kid(kid) + + rv = random.choice(key.keys) + # use side effect to add kid value into header + header['kid'] = rv.kid + return rv + + if isinstance(key, dict) and 'keys' in key: + keys = key['keys'] + kid = header.get('kid') + for k in keys: + if k.get('kid') == kid: + return k + + if not kid: + rv = random.choice(keys) + header['kid'] = rv['kid'] + return rv + raise ValueError('Invalid JSON Web Key Set') + + # append kid into header + if isinstance(key, dict) and 'kid' in key: + header['kid'] = key['kid'] + elif isinstance(key, Key) and key.kid: + header['kid'] = key.kid + return key + + +def create_load_key(key): + def load_key(header, payload): + if isinstance(key, KeySet): + return key.find_by_kid(header.get('kid')) + + if isinstance(key, dict) and 'keys' in key: + keys = key['keys'] + kid = header.get('kid') + + if kid is not None: + # look for the requested key + for k in keys: + if k.get('kid') == kid: + return k + else: + # use the only key + if len(keys) == 1: + return keys[0] + raise ValueError('Invalid JSON Web Key Set') + return key + + return load_key diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__init__.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__init__.py new file mode 100644 index 0000000..fd0f3fe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__init__.py @@ -0,0 +1,5 @@ +from .okp_key import OKPKey +from .jws_eddsa import register_jws_rfc8037 + + +__all__ = ['register_jws_rfc8037', 'OKPKey'] diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..91c6706 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/jws_eddsa.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/jws_eddsa.cpython-312.pyc new file mode 100644 index 0000000..149dc70 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/jws_eddsa.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/okp_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/okp_key.cpython-312.pyc new file mode 100644 index 0000000..6ad05bb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/__pycache__/okp_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/jws_eddsa.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/jws_eddsa.py new file mode 100644 index 0000000..872da8e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/jws_eddsa.py @@ -0,0 +1,27 @@ +from cryptography.exceptions import InvalidSignature +from ..rfc7515 import JWSAlgorithm +from .okp_key import OKPKey + + +class EdDSAAlgorithm(JWSAlgorithm): + name = 'EdDSA' + description = 'Edwards-curve Digital Signature Algorithm for JWS' + + def prepare_key(self, raw_data): + return OKPKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign(msg) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify(sig, msg) + return True + except InvalidSignature: + return False + + +def register_jws_rfc8037(cls): + cls.register_algorithm(EdDSAAlgorithm()) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/okp_key.py b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/okp_key.py new file mode 100644 index 0000000..40f7468 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/rfc8037/okp_key.py @@ -0,0 +1,103 @@ +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PublicKey, Ed25519PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.ed448 import ( + Ed448PublicKey, Ed448PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PublicKey, X25519PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PublicKey, X448PrivateKey +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, PublicFormat, PrivateFormat, NoEncryption +) +from authlib.common.encoding import ( + to_unicode, to_bytes, + urlsafe_b64decode, urlsafe_b64encode, +) +from ..rfc7517 import AsymmetricKey + + +PUBLIC_KEYS_MAP = { + 'Ed25519': Ed25519PublicKey, + 'Ed448': Ed448PublicKey, + 'X25519': X25519PublicKey, + 'X448': X448PublicKey, +} +PRIVATE_KEYS_MAP = { + 'Ed25519': Ed25519PrivateKey, + 'Ed448': Ed448PrivateKey, + 'X25519': X25519PrivateKey, + 'X448': X448PrivateKey, +} + + +class OKPKey(AsymmetricKey): + """Key class of the ``OKP`` key type.""" + + kty = 'OKP' + REQUIRED_JSON_FIELDS = ['crv', 'x'] + PUBLIC_KEY_FIELDS = REQUIRED_JSON_FIELDS + PRIVATE_KEY_FIELDS = ['crv', 'd'] + PUBLIC_KEY_CLS = tuple(PUBLIC_KEYS_MAP.values()) + PRIVATE_KEY_CLS = tuple(PRIVATE_KEYS_MAP.values()) + SSH_PUBLIC_PREFIX = b'ssh-ed25519' + + def exchange_shared_key(self, pubkey): + # used in ECDHESAlgorithm + private_key = self.get_private_key() + if private_key and isinstance(private_key, (X25519PrivateKey, X448PrivateKey)): + return private_key.exchange(pubkey) + raise ValueError('Invalid key for exchanging shared key') + + @staticmethod + def get_key_curve(key): + if isinstance(key, 
(Ed25519PublicKey, Ed25519PrivateKey)): + return 'Ed25519' + elif isinstance(key, (Ed448PublicKey, Ed448PrivateKey)): + return 'Ed448' + elif isinstance(key, (X25519PublicKey, X25519PrivateKey)): + return 'X25519' + elif isinstance(key, (X448PublicKey, X448PrivateKey)): + return 'X448' + + def load_private_key(self): + crv_key = PRIVATE_KEYS_MAP[self._dict_data['crv']] + d_bytes = urlsafe_b64decode(to_bytes(self._dict_data['d'])) + return crv_key.from_private_bytes(d_bytes) + + def load_public_key(self): + crv_key = PUBLIC_KEYS_MAP[self._dict_data['crv']] + x_bytes = urlsafe_b64decode(to_bytes(self._dict_data['x'])) + return crv_key.from_public_bytes(x_bytes) + + def dumps_private_key(self): + obj = self.dumps_public_key(self.private_key.public_key()) + d_bytes = self.private_key.private_bytes( + Encoding.Raw, + PrivateFormat.Raw, + NoEncryption() + ) + obj['d'] = to_unicode(urlsafe_b64encode(d_bytes)) + return obj + + def dumps_public_key(self, public_key=None): + if public_key is None: + public_key = self.public_key + x_bytes = public_key.public_bytes(Encoding.Raw, PublicFormat.Raw) + return { + 'crv': self.get_key_curve(public_key), + 'x': to_unicode(urlsafe_b64encode(x_bytes)), + } + + @classmethod + def generate_key(cls, crv='Ed25519', options=None, is_private=False) -> 'OKPKey': + if crv not in PRIVATE_KEYS_MAP: + raise ValueError(f'Invalid crv value: "{crv}"') + private_key_cls = PRIVATE_KEYS_MAP[crv] + raw_key = private_key_cls.generate() + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) diff --git a/myenv/lib/python3.12/site-packages/authlib/jose/util.py b/myenv/lib/python3.12/site-packages/authlib/jose/util.py new file mode 100644 index 0000000..5b0c759 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/jose/util.py @@ -0,0 +1,37 @@ +import binascii +from authlib.common.encoding import urlsafe_b64decode, json_loads, to_unicode +from authlib.jose.errors import DecodeError + + +def extract_header(header_segment, error_cls): + header_data = extract_segment(header_segment, error_cls, 'header') + + try: + header = json_loads(header_data.decode('utf-8')) + except ValueError as e: + raise error_cls(f'Invalid header string: {e}') + + if not isinstance(header, dict): + raise error_cls('Header must be a json object') + return header + + +def extract_segment(segment, error_cls, name='payload'): + try: + return urlsafe_b64decode(segment) + except (TypeError, binascii.Error): + msg = f'Invalid {name} padding' + raise error_cls(msg) + + +def ensure_dict(s, structure_name): + if not isinstance(s, dict): + try: + s = json_loads(to_unicode(s)) + except (ValueError, TypeError): + raise DecodeError(f'Invalid {structure_name}') + + if not isinstance(s, dict): + raise DecodeError(f'Invalid {structure_name}') + + return s diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/__init__.py new file mode 100644 index 0000000..c9a73dd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/__init__.py @@ -0,0 +1,34 @@ +from .rfc5849 import ( + OAuth1Request, + ClientAuth, + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, + ClientMixin, + TemporaryCredentialMixin, + TokenCredentialMixin, + TemporaryCredential, + AuthorizationServer, + ResourceProtector, +) + +__all__ = [ + 'OAuth1Request', + 'ClientAuth', + 'SIGNATURE_HMAC_SHA1', + 'SIGNATURE_RSA_SHA1', + 
'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', + 'SIGNATURE_TYPE_QUERY', + 'SIGNATURE_TYPE_BODY', + + 'ClientMixin', + 'TemporaryCredentialMixin', + 'TokenCredentialMixin', + 'TemporaryCredential', + 'AuthorizationServer', + 'ResourceProtector', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d702521 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..1971ec3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..f0d8e0a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/client.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/client.py new file mode 100644 index 0000000..1f74f32 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/client.py @@ -0,0 +1,172 @@ +from authlib.common.urls import ( + url_decode, + add_params_to_uri, + urlparse, +) +from authlib.common.encoding import json_loads +from .rfc5849 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, + ClientAuth, +) + + +class OAuth1Client: + auth_class = ClientAuth + + def __init__(self, session, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, realm=None, **kwargs): + if not client_id: + raise ValueError('Missing "client_id"') + + self.session = session + self.auth = self.auth_class( + client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, + signature_method=signature_method, + signature_type=signature_type, + rsa_key=rsa_key, + verifier=verifier, + realm=realm, + force_include_body=force_include_body + ) + self._kwargs = kwargs + + @property + def redirect_uri(self): + return self.auth.redirect_uri + + @redirect_uri.setter + def redirect_uri(self, uri): + self.auth.redirect_uri = uri + + @property + def token(self): + return dict( + oauth_token=self.auth.token, + oauth_token_secret=self.auth.token_secret, + oauth_verifier=self.auth.verifier + ) + + @token.setter + def token(self, token): + """This token setter is designed for easy integration with + OAuthClient. Make sure both OAuth1Session and OAuth2Session + have token setters.
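How the ``token`` property above behaves end to end; the session is a stand-in (any requests-compatible session object works) and the credentials are the RFC 5849 example values::

    import requests
    from authlib.oauth1.client import OAuth1Client

    client = OAuth1Client(requests.Session(), client_id='dpf43f3p2l4k3l03',
                          client_secret='kd94hf93k423kf44')
    client.token = {'oauth_token': 'hh5s93j4hdidpola',
                    'oauth_token_secret': 'hdhd0244k9j7ao03'}
    assert client.token['oauth_token'] == 'hh5s93j4hdidpola'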
+ """ + if token is None: + self.auth.token = None + self.auth.token_secret = None + self.auth.verifier = None + elif 'oauth_token' in token: + self.auth.token = token['oauth_token'] + if 'oauth_token_secret' in token: + self.auth.token_secret = token['oauth_token_secret'] + if 'oauth_verifier' in token: + self.auth.verifier = token['oauth_verifier'] + else: + message = f'oauth_token is missing: {token!r}' + self.handle_error('missing_token', message) + + def create_authorization_url(self, url, request_token=None, **kwargs): + """Create an authorization URL by appending request_token and optional + kwargs to url. + + This is the second step in the OAuth 1 workflow. The user should be + redirected to this authorization URL, grant access to you, and then + be redirected back to you. The redirection back can either be specified + during client registration or by supplying a callback URI per request. + + :param url: The authorization endpoint URL. + :param request_token: The previously obtained request token. + :param kwargs: Optional parameters to append to the URL. + :returns: The authorization URL with new parameters embedded. + """ + kwargs['oauth_token'] = request_token or self.auth.token + if self.auth.redirect_uri: + kwargs['oauth_callback'] = self.auth.redirect_uri + return add_params_to_uri(url, kwargs.items()) + + def fetch_request_token(self, url, **kwargs): + """Method for fetching a request token from the request token endpoint. + + This is the first step in the OAuth 1 workflow. A request token is + obtained by making a signed post request to url. The token is then + parsed from the application/x-www-form-urlencoded response and ready + to be used to construct an authorization url. + + :param url: Request Token endpoint. + :param kwargs: Extra parameters to include for fetching token. + :return: A Request Token dict. + """ + return self._fetch_token(url, **kwargs) + + def fetch_access_token(self, url, verifier=None, **kwargs): + """Method for fetching an access token from the token endpoint. + + This is the final step in the OAuth 1 workflow. An access token is + obtained using all previously obtained credentials, including the + verifier from the authorization step. + + :param url: Access Token endpoint. + :param verifier: A verifier string to prove authorization was granted. + :param kwargs: Extra parameters to include for fetching access token. + :return: A token dict. + """ + if verifier: + self.auth.verifier = verifier + if not self.auth.verifier: + self.handle_error('missing_verifier', 'Missing "verifier" value') + return self._fetch_token(url, **kwargs) + + def parse_authorization_response(self, url): + """Extract parameters from the post authorization redirect + response URL. + + :param url: The full URL that resulted from the user being redirected + back from the OAuth provider to you, the client. + :returns: A dict of parameters extracted from the URL. + """ + token = dict(url_decode(urlparse.urlparse(url).query)) + self.token = token + return token + + def _fetch_token(self, url, **kwargs): + resp = self.session.post(url, auth=self.auth, **kwargs) + token = self.parse_response_token(resp.status_code, resp.text) + self.token = token + self.auth.verifier = None + return token + + def parse_response_token(self, status_code, text): + if status_code >= 400: + message = ( + "Token request failed with code {}, " + "response was '{}'."
+ ).format(status_code, text) + self.handle_error('fetch_token_denied', message) + + try: + text = text.strip() + if text.startswith('{'): + token = json_loads(text) + else: + token = dict(url_decode(text)) + except (TypeError, ValueError) as e: + error = ( + "Unable to decode token from token response. " + "This is commonly caused by an unsuccessful request where" + " a non urlencoded error message is returned. " + "The decoding error was {}" + ).format(e) + raise ValueError(error) + return token + + @staticmethod + def handle_error(error_type, error_description): + raise ValueError(f'{error_type}: {error_description}') diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/errors.py new file mode 100644 index 0000000..e7770da --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/errors.py @@ -0,0 +1,3 @@ +# flake8: noqa + +from .rfc5849.errors import * diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__init__.py new file mode 100644 index 0000000..1f029fb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__init__.py @@ -0,0 +1,45 @@ +""" + authlib.oauth1.rfc5849 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of The OAuth 1.0 Protocol. + + https://tools.ietf.org/html/rfc5849 +""" + +from .wrapper import OAuth1Request +from .client_auth import ClientAuth +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) +from .models import ( + ClientMixin, + TemporaryCredentialMixin, + TokenCredentialMixin, + TemporaryCredential, +) +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector + +__all__ = [ + 'OAuth1Request', + 'ClientAuth', + 'SIGNATURE_HMAC_SHA1', + 'SIGNATURE_RSA_SHA1', + 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', + 'SIGNATURE_TYPE_QUERY', + 'SIGNATURE_TYPE_BODY', + + 'ClientMixin', + 'TemporaryCredentialMixin', + 'TokenCredentialMixin', + 'TemporaryCredential', + 'AuthorizationServer', + 'ResourceProtector', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6aeaf77 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/authorization_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..b99f400 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/base_server.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/base_server.cpython-312.pyc new file mode 100644 index 0000000..3c4ae98 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/base_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/client_auth.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/client_auth.cpython-312.pyc new file mode 100644 index 0000000..e588365 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/client_auth.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..481e3c3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..8ae094c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/parameters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/parameters.cpython-312.pyc new file mode 100644 index 0000000..9b28ef9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/parameters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..dd52303 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/rsa.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..6766f6a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/rsa.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/signature.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/signature.cpython-312.pyc new file mode 100644 index 0000000..3dfce22 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/signature.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..1be0984 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/wrapper.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/wrapper.cpython-312.pyc new file mode 100644 index 0000000..0ba7709 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/__pycache__/wrapper.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/authorization_server.py new file mode 100644 index 0000000..54cf7ba --- /dev/null 
+++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/authorization_server.py @@ -0,0 +1,357 @@ +from authlib.common.urls import is_valid_url, add_params_to_uri +from .base_server import BaseServer +from .errors import ( + OAuth1Error, + InvalidRequestError, + MissingRequiredParameterError, + InvalidClientError, + InvalidTokenError, + AccessDeniedError, + MethodNotAllowedError, +) + + +class AuthorizationServer(BaseServer): + TOKEN_RESPONSE_HEADER = [ + ('Content-Type', 'application/x-www-form-urlencoded'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), + ] + + TEMPORARY_CREDENTIALS_METHOD = 'POST' + + def _get_client(self, request): + client = self.get_client_by_id(request.client_id) + request.client = client + return client + + def create_oauth1_request(self, request): + raise NotImplementedError() + + def handle_response(self, status_code, payload, headers): + raise NotImplementedError() + + def handle_error_response(self, error): + return self.handle_response( + error.status_code, + error.get_body(), + error.get_headers() + ) + + def validate_temporary_credentials_request(self, request): + """Validate HTTP request for temporary credentials.""" + + # The client obtains a set of temporary credentials from the server by + # making an authenticated (Section 3) HTTP "POST" request to the + # Temporary Credential Request endpoint (unless the server advertises + # another HTTP request method for the client to use). + if request.method.upper() != self.TEMPORARY_CREDENTIALS_METHOD: + raise MethodNotAllowedError() + + # REQUIRED parameter + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + # REQUIRED parameter + oauth_callback = request.redirect_uri + if not request.redirect_uri: + raise MissingRequiredParameterError('oauth_callback') + + # The callback MUST be an absolute URI, or "oob" if the client is + # unable to receive callbacks (RFC 5849, Section 2.1) + if oauth_callback != 'oob' and not is_valid_url(oauth_callback): + raise InvalidRequestError('Invalid "oauth_callback" value') + + client = self._get_client(request) + if not client: + raise InvalidClientError() + + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def create_temporary_credentials_response(self, request=None): + """Validate the temporary credentials request and create a response + with the temporary credentials. Assume the endpoint of temporary + credentials request is ``https://photos.example.net/initiate``: + + .. code-block:: http + + POST /initiate HTTP/1.1 + Host: photos.example.net + Authorization: OAuth realm="Photos", + oauth_consumer_key="dpf43f3p2l4k3l03", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131200", + oauth_nonce="wIjqoS", + oauth_callback="http%3A%2F%2Fprinter.example.com%2Fready", + oauth_signature="74KNZJeDHnMBp0EMJ9ZHt%2FXKycU%3D" + + The server validates the request and replies with a set of temporary + credentials in the body of the HTTP response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/x-www-form-urlencoded + + oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03& + oauth_callback_confirmed=true + + :param request: OAuth1Request instance.
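``create_oauth1_request`` and ``handle_response`` above are the only framework-specific hooks; a minimal sketch of wiring them up (the request attributes are assumptions, not part of the library)::

    from authlib.common.urls import url_encode
    from authlib.oauth1.rfc5849 import AuthorizationServer, OAuth1Request

    class MyAuthorizationServer(AuthorizationServer):
        def create_oauth1_request(self, request):
            # adapt your framework's request object
            return OAuth1Request(request.method, request.url,
                                 body=request.form, headers=request.headers)

        def handle_response(self, status_code, payload, headers):
            if isinstance(payload, list):
                payload = url_encode(payload)  # form-encoded token responses
            return status_code, payload, headers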
+ :returns: (status_code, body, headers) + """ + try: + request = self.create_oauth1_request(request) + self.validate_temporary_credentials_request(request) + except OAuth1Error as error: + return self.handle_error_response(error) + + credential = self.create_temporary_credential(request) + payload = [ + ('oauth_token', credential.get_oauth_token()), + ('oauth_token_secret', credential.get_oauth_token_secret()), + ('oauth_callback_confirmed', True) + ] + return self.handle_response(200, payload, self.TOKEN_RESPONSE_HEADER) + + def validate_authorization_request(self, request): + """Validate the request for resource owner authorization.""" + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + credential = self.get_temporary_credential(request) + if not credential: + raise InvalidTokenError() + + # assign credential for later use + request.credential = credential + return request + + def create_authorization_response(self, request, grant_user=None): + """Validate authorization request and create authorization response. + Assume the endpoint for authorization request is + ``https://photos.example.net/authorize``, the client redirects Jane's + user-agent to the server's Resource Owner Authorization endpoint to + obtain Jane's approval for accessing her private photos:: + + https://photos.example.net/authorize?oauth_token=hh5s93j4hdidpola + + The server requests Jane to sign in using her username and password + and if successful, asks her to approve granting 'printer.example.com' + access to her private photos. Jane approves the request and her + user-agent is redirected to the callback URI provided by the client + in the previous request (line breaks are for display purposes only):: + + http://printer.example.com/ready? + oauth_token=hh5s93j4hdidpola&oauth_verifier=hfdp7dh39dks9884 + + :param request: OAuth1Request instance. + :param grant_user: if granted, pass the grant user, otherwise None. 
+ :returns: (status_code, body, headers) + """ + request = self.create_oauth1_request(request) + # authorize endpoint should try catch this error + self.validate_authorization_request(request) + + temporary_credentials = request.credential + redirect_uri = temporary_credentials.get_redirect_uri() + if not redirect_uri or redirect_uri == 'oob': + client_id = temporary_credentials.get_client_id() + client = self.get_client_by_id(client_id) + redirect_uri = client.get_default_redirect_uri() + + if grant_user is None: + error = AccessDeniedError() + location = add_params_to_uri(redirect_uri, error.get_body()) + return self.handle_response(302, '', [('Location', location)]) + + request.user = grant_user + verifier = self.create_authorization_verifier(request) + + params = [ + ('oauth_token', request.token), + ('oauth_verifier', verifier) + ] + location = add_params_to_uri(redirect_uri, params) + return self.handle_response(302, '', [('Location', location)]) + + def validate_token_request(self, request): + """Validate request for issuing token.""" + + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + client = self._get_client(request) + if not client: + raise InvalidClientError() + + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + token = self.get_temporary_credential(request) + if not token: + raise InvalidTokenError() + + verifier = request.oauth_params.get('oauth_verifier') + if not verifier: + raise MissingRequiredParameterError('oauth_verifier') + + if not token.check_verifier(verifier): + raise InvalidRequestError('Invalid "oauth_verifier"') + + request.credential = token + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def create_token_response(self, request): + """Validate token request and create token response. Assuming the + endpoint of token request is ``https://photos.example.net/token``, + the callback request informs the client that Jane completed the + authorization process. The client then requests a set of token + credentials using its temporary credentials (over a secure Transport + Layer Security (TLS) channel): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: photos.example.net + Authorization: OAuth realm="Photos", + oauth_consumer_key="dpf43f3p2l4k3l03", + oauth_token="hh5s93j4hdidpola", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="walatlh", + oauth_verifier="hfdp7dh39dks9884", + oauth_signature="gKgrFCywp7rO0OXSjdot%2FIHF7IU%3D" + + The server validates the request and replies with a set of token + credentials in the body of the HTTP response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/x-www-form-urlencoded + + oauth_token=nnch734d00sl2jdk&oauth_token_secret=pfkkdhi9sl3r4s00 + + :param request: OAuth1Request instance. 
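In an authorize view, the grant/deny branch above is typically driven like this; ``server`` is a configured subclass and ``current_user`` a placeholder::

    # after authenticating the resource owner
    if user_approved:
        rv = server.create_authorization_response(http_request,
                                                  grant_user=current_user)
    else:
        # grant_user=None redirects back with error=access_denied
        rv = server.create_authorization_response(http_request, grant_user=None)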
+ :returns: (status_code, body, headers) + """ + try: + request = self.create_oauth1_request(request) + except OAuth1Error as error: + return self.handle_error_response(error) + + try: + self.validate_token_request(request) + except OAuth1Error as error: + self.delete_temporary_credential(request) + return self.handle_error_response(error) + + credential = self.create_token_credential(request) + payload = [ + ('oauth_token', credential.get_oauth_token()), + ('oauth_token_secret', credential.get_oauth_token_secret()), + ] + self.delete_temporary_credential(request) + return self.handle_response(200, payload, self.TOKEN_RESPONSE_HEADER) + + def create_temporary_credential(self, request): + """Generate and save a temporary credential into database or cache. + A temporary credential is used for exchanging token credential. This + method should be re-implemented:: + + def create_temporary_credential(self, request): + oauth_token = generate_token(36) + oauth_token_secret = generate_token(48) + temporary_credential = TemporaryCredential( + oauth_token=oauth_token, + oauth_token_secret=oauth_token_secret, + client_id=request.client_id, + redirect_uri=request.redirect_uri, + ) + # if the credential has a save method + temporary_credential.save() + return temporary_credential + + :param request: OAuth1Request instance + :return: TemporaryCredential instance + """ + raise NotImplementedError() + + def get_temporary_credential(self, request): + """Get the temporary credential from database or cache. A temporary + credential should share the same methods as described in models of + ``TemporaryCredentialMixin``:: + + def get_temporary_credential(self, request): + key = 'a-key-prefix:{}'.format(request.token) + data = cache.get(key) + # TemporaryCredential shares methods from TemporaryCredentialMixin + return TemporaryCredential(data) + + :param request: OAuth1Request instance + :return: TemporaryCredential instance + """ + raise NotImplementedError() + + def delete_temporary_credential(self, request): + """Delete temporary credential from database or cache. For instance, + if temporary credential is saved in cache:: + + def delete_temporary_credential(self, request): + key = 'a-key-prefix:{}'.format(request.token) + cache.delete(key) + + :param request: OAuth1Request instance + """ + raise NotImplementedError() + + def create_authorization_verifier(self, request): + """Create and bind ``oauth_verifier`` to temporary credential. It + could be re-implemented in this way:: + + def create_authorization_verifier(self, request): + verifier = generate_token(36) + + temporary_credential = request.credential + user_id = request.user.id + + temporary_credential.user_id = user_id + temporary_credential.oauth_verifier = verifier + # if the credential has a save method + temporary_credential.save() + + # remember to return the verifier + return verifier + + :param request: OAuth1Request instance + :return: A string of ``oauth_verifier`` + """ + raise NotImplementedError() + + def create_token_credential(self, request): + """Create and save token credential into database. 
This method would + be re-implemented like this:: + + def create_token_credential(self, request): + oauth_token = generate_token(36) + oauth_token_secret = generate_token(48) + temporary_credential = request.credential + + token_credential = TokenCredential( + oauth_token=oauth_token, + oauth_token_secret=oauth_token_secret, + client_id=temporary_credential.get_client_id(), + user_id=temporary_credential.get_user_id() + ) + # if the credential has a save method + token_credential.save() + return token_credential + + :param request: OAuth1Request instance + :return: TokenCredential instance + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/base_server.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/base_server.py new file mode 100644 index 0000000..5d29deb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/base_server.py @@ -0,0 +1,119 @@ +import time +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1, +) +from .signature import ( + verify_hmac_sha1, + verify_plaintext, + verify_rsa_sha1, +) +from .errors import ( + InvalidRequestError, + MissingRequiredParameterError, + UnsupportedSignatureMethodError, + InvalidNonceError, + InvalidSignatureError, +) + + +class BaseServer: + SIGNATURE_METHODS = { + SIGNATURE_HMAC_SHA1: verify_hmac_sha1, + SIGNATURE_RSA_SHA1: verify_rsa_sha1, + SIGNATURE_PLAINTEXT: verify_plaintext, + } + SUPPORTED_SIGNATURE_METHODS = [SIGNATURE_HMAC_SHA1] + EXPIRY_TIME = 300 + + @classmethod + def register_signature_method(cls, name, verify): + """Extend signature method verification. + + :param name: A string to represent signature method. + :param verify: A function to verify signature. + + The ``verify`` method accepts an ``OAuth1Request`` as its parameter:: + + def verify_custom_method(request): + # verify this request, return True or False + return True + + Server.register_signature_method('custom-name', verify_custom_method) + """ + cls.SIGNATURE_METHODS[name] = verify + + def validate_timestamp_and_nonce(self, request): + """Validate ``oauth_timestamp`` and ``oauth_nonce`` in HTTP request. + + :param request: OAuth1Request instance + """ + timestamp = request.oauth_params.get('oauth_timestamp') + nonce = request.oauth_params.get('oauth_nonce') + + if request.signature_method == SIGNATURE_PLAINTEXT: + # The parameters MAY be omitted when using the "PLAINTEXT" + # signature method + if not timestamp and not nonce: + return + + if not timestamp: + raise MissingRequiredParameterError('oauth_timestamp') + + try: + # The timestamp value MUST be a positive integer + timestamp = int(timestamp) + if timestamp < 0: + raise InvalidRequestError('Invalid "oauth_timestamp" value') + + if self.EXPIRY_TIME and time.time() - timestamp > self.EXPIRY_TIME: + raise InvalidRequestError('Invalid "oauth_timestamp" value') + except (ValueError, TypeError): + raise InvalidRequestError('Invalid "oauth_timestamp" value') + + if not nonce: + raise MissingRequiredParameterError('oauth_nonce') + + if self.exists_nonce(nonce, request): + raise InvalidNonceError() + + def validate_oauth_signature(self, request): + """Validate ``oauth_signature`` from HTTP request.
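``exists_nonce`` (declared further below) backs the nonce check above; a cache-based sketch, where ``cache`` is an assumed memcache/redis-style object::

    def exists_nonce(self, nonce, request):
        key = f'nonce:{request.client_id}:{request.token}:{nonce}'
        found = cache.get(key) is not None
        # remember the nonce for as long as timestamps are accepted
        cache.set(key, 1, timeout=self.EXPIRY_TIME)
        return found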
+ + :param request: OAuth1Request instance + """ + method = request.signature_method + if not method: + raise MissingRequiredParameterError('oauth_signature_method') + + if method not in self.SUPPORTED_SIGNATURE_METHODS: + raise UnsupportedSignatureMethodError() + + if not request.signature: + raise MissingRequiredParameterError('oauth_signature') + + verify = self.SIGNATURE_METHODS.get(method) + if not verify: + raise UnsupportedSignatureMethodError() + + if not verify(request): + raise InvalidSignatureError() + + def get_client_by_id(self, client_id): + """Get client instance with the given ``client_id``. + + :param client_id: A string of client_id + :return: Client instance + """ + raise NotImplementedError() + + def exists_nonce(self, nonce, request): + """The nonce value MUST be unique across all requests with the same + timestamp, client credentials, and token combinations. + + :param nonce: A string value of ``oauth_nonce`` + :param request: OAuth1Request instance + :return: Boolean + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/client_auth.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/client_auth.py new file mode 100644 index 0000000..2c59b59 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/client_auth.py @@ -0,0 +1,187 @@ +import time +import base64 +import hashlib +from authlib.common.security import generate_token +from authlib.common.urls import extract_params +from authlib.common.encoding import to_native +from .wrapper import OAuth1Request +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_QUERY, +) +from .signature import ( + sign_hmac_sha1, + sign_rsa_sha1, + sign_plaintext +) +from .parameters import ( + prepare_form_encoded_body, + prepare_headers, + prepare_request_uri_query, +) + + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +class ClientAuth: + SIGNATURE_METHODS = { + SIGNATURE_HMAC_SHA1: sign_hmac_sha1, + SIGNATURE_RSA_SHA1: sign_rsa_sha1, + SIGNATURE_PLAINTEXT: sign_plaintext, + } + + @classmethod + def register_signature_method(cls, name, sign): + """Extend client signature methods. + + :param name: A string to represent signature method. + :param sign: A function to generate signature. + + The ``sign`` method accepts 2 parameters:: + + def custom_sign_method(client, request): + # client is the instance of Client.
+ return 'your-signed-string' + + Client.register_signature_method('custom-name', custom_sign_method) + """ + cls.SIGNATURE_METHODS[name] = sign + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + realm=None, force_include_body=False): + self.client_id = client_id + self.client_secret = client_secret + self.token = token + self.token_secret = token_secret + self.redirect_uri = redirect_uri + self.signature_method = signature_method + self.signature_type = signature_type + self.rsa_key = rsa_key + self.verifier = verifier + self.realm = realm + self.force_include_body = force_include_body + + def get_oauth_signature(self, method, uri, headers, body): + """Get an OAuth signature to be used in signing a request + + To satisfy `section 3.4.1.2`_ item 2, if the request argument's + headers dict attribute contains a Host item, its value will + replace any netloc part of the request argument's uri attribute + value. + + .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + """ + sign = self.SIGNATURE_METHODS.get(self.signature_method) + if not sign: + raise ValueError('Invalid signature method.') + + request = OAuth1Request(method, uri, body=body, headers=headers) + return sign(self, request) + + def get_oauth_params(self, nonce, timestamp): + oauth_params = [ + ('oauth_nonce', nonce), + ('oauth_timestamp', timestamp), + ('oauth_version', '1.0'), + ('oauth_signature_method', self.signature_method), + ('oauth_consumer_key', self.client_id), + ] + if self.token: + oauth_params.append(('oauth_token', self.token)) + if self.redirect_uri: + oauth_params.append(('oauth_callback', self.redirect_uri)) + if self.verifier: + oauth_params.append(('oauth_verifier', self.verifier)) + return oauth_params + + def _render(self, uri, headers, body, oauth_params): + if self.signature_type == SIGNATURE_TYPE_HEADER: + headers = prepare_headers(oauth_params, headers, realm=self.realm) + elif self.signature_type == SIGNATURE_TYPE_BODY: + if CONTENT_TYPE_FORM_URLENCODED in headers.get('Content-Type', ''): + decoded_body = extract_params(body) or [] + body = prepare_form_encoded_body(oauth_params, decoded_body) + headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED + elif self.signature_type == SIGNATURE_TYPE_QUERY: + uri = prepare_request_uri_query(oauth_params, uri) + else: + raise ValueError('Unknown signature type specified.') + return uri, headers, body + + def sign(self, method, uri, headers, body): + """Sign the HTTP request, add OAuth parameters and signature. + + :param method: HTTP method of the request. + :param uri: URI of the HTTP request. + :param body: Body payload of the HTTP request. + :param headers: Headers of the HTTP request. 
+ :return: uri, headers, body + """ + nonce = generate_nonce() + timestamp = generate_timestamp() + if body is None: + body = b'' + + # transform int to str + timestamp = str(timestamp) + + if headers is None: + headers = {} + + oauth_params = self.get_oauth_params(nonce, timestamp) + + # https://datatracker.ietf.org/doc/html/draft-eaton-oauth-bodyhash-00.html + # include oauth_body_hash + if body and headers.get('Content-Type') != CONTENT_TYPE_FORM_URLENCODED: + oauth_body_hash = base64.b64encode(hashlib.sha1(body).digest()) + oauth_params.append(('oauth_body_hash', oauth_body_hash.decode('utf-8'))) + + uri, headers, body = self._render(uri, headers, body, oauth_params) + + sig = self.get_oauth_signature(method, uri, headers, body) + oauth_params.append(('oauth_signature', sig)) + + uri, headers, body = self._render(uri, headers, body, oauth_params) + return uri, headers, body + + def prepare(self, method, uri, headers, body): + """Add OAuth parameters to the request. + + Parameters may be included from the body if the content-type is + urlencoded; if no content type is set, a guess is made. + """ + content_type = to_native(headers.get('Content-Type', '')) + if self.signature_type == SIGNATURE_TYPE_BODY: + content_type = CONTENT_TYPE_FORM_URLENCODED + elif not content_type and extract_params(body): + content_type = CONTENT_TYPE_FORM_URLENCODED + + if CONTENT_TYPE_FORM_URLENCODED in content_type: + headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED + uri, headers, body = self.sign(method, uri, headers, body) + elif self.force_include_body: + # To allow custom clients to work on non form encoded bodies. + uri, headers, body = self.sign(method, uri, headers, body) + else: + # Omit body data in the signing of non form-encoded requests + uri, headers, _ = self.sign(method, uri, headers, b'') + body = b'' + return uri, headers, body + + +def generate_nonce(): + return generate_token() + + +def generate_timestamp(): + return str(int(time.time())) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/errors.py new file mode 100644 index 0000000..93396fc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/errors.py @@ -0,0 +1,89 @@ +""" + authlib.oauth1.rfc5849.errors + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC5849 has no definition of errors. This module is designed by + Authlib based on OAuth 1.0a `Section 10`_ with some changes. + + .. _`Section 10`: https://oauth.net/core/1.0a/#rfc.section.10 +""" +from authlib.common.errors import AuthlibHTTPError +from authlib.common.security import is_secure_transport + + +class OAuth1Error(AuthlibHTTPError): + def __init__(self, description=None, uri=None, status_code=None): + super().__init__(None, description, uri, status_code) + + def get_headers(self): + """Get a list of headers.""" + return [ + ('Content-Type', 'application/x-www-form-urlencoded'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache') + ] + + +class InsecureTransportError(OAuth1Error): + error = 'insecure_transport' + description = 'OAuth 1 MUST utilize https.'
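Driving the signer above directly; the consumer and token credentials are the RFC 5849 example values::

    from authlib.oauth1.rfc5849 import ClientAuth

    auth = ClientAuth('dpf43f3p2l4k3l03', 'kd94hf93k423kf44',
                      token='nnch734d00sl2jdk', token_secret='pfkkdhi9sl3r4s00')
    uri, headers, body = auth.prepare(
        'GET', 'https://photos.example.net/photos?size=original', {}, b'')
    # headers['Authorization'] now carries the oauth_* parameters and signature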
+ + @classmethod + def check(cls, uri): + if not is_secure_transport(uri): + raise cls() + + +class InvalidRequestError(OAuth1Error): + error = 'invalid_request' + + +class UnsupportedParameterError(OAuth1Error): + error = 'unsupported_parameter' + + +class UnsupportedSignatureMethodError(OAuth1Error): + error = 'unsupported_signature_method' + + +class MissingRequiredParameterError(OAuth1Error): + error = 'missing_required_parameter' + + def __init__(self, key): + description = f'missing "{key}" in parameters' + super().__init__(description=description) + + +class DuplicatedOAuthProtocolParameterError(OAuth1Error): + error = 'duplicated_oauth_protocol_parameter' + + +class InvalidClientError(OAuth1Error): + error = 'invalid_client' + status_code = 401 + + +class InvalidTokenError(OAuth1Error): + error = 'invalid_token' + description = 'Invalid or expired "oauth_token" in parameters' + status_code = 401 + + +class InvalidSignatureError(OAuth1Error): + error = 'invalid_signature' + status_code = 401 + + +class InvalidNonceError(OAuth1Error): + error = 'invalid_nonce' + status_code = 401 + + +class AccessDeniedError(OAuth1Error): + error = 'access_denied' + description = 'The resource owner or authorization server denied the request' + + +class MethodNotAllowedError(OAuth1Error): + error = 'method_not_allowed' + status_code = 405 diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/models.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/models.py new file mode 100644 index 0000000..c9f3ea6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/models.py @@ -0,0 +1,108 @@ +class ClientMixin: + def get_default_redirect_uri(self): + """A method to get client default redirect_uri. For instance, the + database table for client has a column called ``default_redirect_uri``:: + + def get_default_redirect_uri(self): + return self.default_redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_client_secret(self): + """A method to return the client_secret of this client. For instance, + the database table has a column called ``client_secret``:: + + def get_client_secret(self): + return self.client_secret + """ + raise NotImplementedError() + + def get_rsa_public_key(self): + """A method to get the RSA public key for RSA-SHA1 signature method. + For instance, the value is saved on column ``rsa_public_key``:: + + def get_rsa_public_key(self): + return self.rsa_public_key + """ + raise NotImplementedError() + + +class TokenCredentialMixin: + def get_oauth_token(self): + """A method to get the value of ``oauth_token``. For instance, the + database table has a column called ``oauth_token``:: + + def get_oauth_token(self): + return self.oauth_token + + :return: A string + """ + raise NotImplementedError() + + def get_oauth_token_secret(self): + """A method to get the value of ``oauth_token_secret``. For instance, + the database table has a column called ``oauth_token_secret``:: + + def get_oauth_token_secret(self): + return self.oauth_token_secret + + :return: A string + """ + raise NotImplementedError() + + +class TemporaryCredentialMixin(TokenCredentialMixin): + def get_client_id(self): + """A method to get the client_id associated with this credential. + For instance, the table in the database has a column ``client_id``:: + + def get_client_id(self): + return self.client_id + """ + raise NotImplementedError() + + def get_redirect_uri(self): + """A method to get temporary credential's ``oauth_callback``. 
+        For instance, the database table for temporary credential has a
+        column called ``oauth_callback``::
+
+            def get_redirect_uri(self):
+                return self.oauth_callback
+
+        :return: A URL string
+        """
+        raise NotImplementedError()
+
+    def check_verifier(self, verifier):
+        """A method to check if the given verifier matches this temporary
+        credential. For instance, if this temporary credential has recorded
+        the value in the database in a column called ``oauth_verifier``::
+
+            def check_verifier(self, verifier):
+                return self.oauth_verifier == verifier
+
+        :return: Boolean
+        """
+        raise NotImplementedError()
+
+
+class TemporaryCredential(dict, TemporaryCredentialMixin):
+    def get_client_id(self):
+        return self.get('client_id')
+
+    def get_user_id(self):
+        return self.get('user_id')
+
+    def get_redirect_uri(self):
+        return self.get('oauth_callback')
+
+    def check_verifier(self, verifier):
+        return self.get('oauth_verifier') == verifier
+
+    def get_oauth_token(self):
+        return self.get('oauth_token')
+
+    def get_oauth_token_secret(self):
+        return self.get('oauth_token_secret')
diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/parameters.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/parameters.py
new file mode 100644
index 0000000..0e64e5c
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/parameters.py
@@ -0,0 +1,101 @@
+"""
+    authlib.oauth1.rfc5849.parameters
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec.
+
+    .. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5
+"""
+from authlib.common.urls import urlparse, url_encode, extract_params
+from .util import escape
+
+
+def prepare_headers(oauth_params, headers=None, realm=None):
+    """**Prepare the Authorization header.**
+    Per `section 3.5.1`_ of the spec.
+
+    Protocol parameters can be transmitted using the HTTP "Authorization"
+    header field as defined by `RFC2617`_ with the auth-scheme name set to
+    "OAuth" (case insensitive).
+
+    For example::
+
+        Authorization: OAuth realm="Photos",
+            oauth_consumer_key="dpf43f3p2l4k3l03",
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp="137131200",
+            oauth_nonce="wIjqoS",
+            oauth_callback="http%3A%2F%2Fprinter.example.com%2Fready",
+            oauth_signature="74KNZJeDHnMBp0EMJ9ZHt%2FXKycU%3D",
+            oauth_version="1.0"
+
+    .. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1
+    .. _`RFC2617`: https://tools.ietf.org/html/rfc2617
+    """
+    headers = headers or {}
+
+    # step 1, 2, 3 in Section 3.5.1
+    header_parameters = ', '.join([
+        f'{escape(k)}="{escape(v)}"' for k, v in oauth_params
+        if k.startswith('oauth_')
+    ])
+
+    # 4. The OPTIONAL "realm" parameter MAY be added and interpreted per
+    #    `RFC2617 section 1.2`_.
+    #
+    # .. _`RFC2617 section 1.2`: https://tools.ietf.org/html/rfc2617#section-1.2
+    if realm:
+        # NOTE: realm should *not* be escaped
+        header_parameters = f'realm="{realm}", ' + header_parameters
+
+    # the auth-scheme name set to "OAuth" (case insensitive).
+    headers['Authorization'] = f'OAuth {header_parameters}'
+    return headers
+
+
+def _append_params(oauth_params, params):
+    """Append OAuth params to an existing set of parameters.
+
+    Both params and oauth_params must be lists of 2-tuples.
+
+    Per `section 3.5.2`_ and `3.5.3`_ of the spec.
+
+    .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
+    ..
_`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + merged = list(params) + merged.extend(oauth_params) + # The request URI / entity-body MAY include other request-specific + # parameters, in which case, the protocol parameters SHOULD be appended + # following the request-specific parameters, properly separated by an "&" + # character (ASCII code 38) + merged.sort(key=lambda i: i[0].startswith('oauth_')) + return merged + + +def prepare_form_encoded_body(oauth_params, body): + """Prepare the Form-Encoded Body. + + Per `section 3.5.2`_ of the spec. + + .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2 + + """ + # append OAuth params to the existing body + return url_encode(_append_params(oauth_params, body)) + + +def prepare_request_uri_query(oauth_params, uri): + """Prepare the Request URI Query. + + Per `section 3.5.3`_ of the spec. + + .. _`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + # append OAuth params to the existing set of query components + sch, net, path, par, query, fra = urlparse.urlparse(uri) + query = url_encode( + _append_params(oauth_params, extract_params(query) or [])) + return urlparse.urlunparse((sch, net, path, par, query, fra)) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/resource_protector.py new file mode 100644 index 0000000..2b5d781 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/resource_protector.py @@ -0,0 +1,41 @@ +from .base_server import BaseServer +from .wrapper import OAuth1Request +from .errors import ( + MissingRequiredParameterError, + InvalidClientError, + InvalidTokenError, +) + + +class ResourceProtector(BaseServer): + def validate_request(self, method, uri, body, headers): + request = OAuth1Request(method, uri, body, headers) + + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + client = self.get_client_by_id(request.client_id) + if not client: + raise InvalidClientError() + request.client = client + + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + token = self.get_token_credential(request) + if not token: + raise InvalidTokenError() + + request.credential = token + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def get_token_credential(self, request): + """Fetch the token credential from data store like a database, + framework should implement this function. 
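A minimal sketch of one possible override, assuming a hypothetical SQLAlchemy-style ``TokenCredential`` model (the model and its columns are illustrative, not part of this module):

    class MyResourceProtector(ResourceProtector):
        def get_token_credential(self, request):
            # Look up the credential issued to this client for the
            # presented oauth_token.
            return TokenCredential.query.filter_by(
                client_id=request.client_id,
                oauth_token=request.token,
            ).first()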
+ + :param request: OAuth1Request instance + :return: Token model instance + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/rsa.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/rsa.py new file mode 100644 index 0000000..3785b0f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/rsa.py @@ -0,0 +1,29 @@ +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key, load_pem_public_key +) +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.exceptions import InvalidSignature +from authlib.common.encoding import to_bytes + + +def sign_sha1(msg, rsa_private_key): + key = load_pem_private_key( + to_bytes(rsa_private_key), + password=None, + backend=default_backend() + ) + return key.sign(msg, padding.PKCS1v15(), hashes.SHA1()) + + +def verify_sha1(sig, msg, rsa_public_key): + key = load_pem_public_key( + to_bytes(rsa_public_key), + backend=default_backend() + ) + try: + key.verify(sig, msg, padding.PKCS1v15(), hashes.SHA1()) + return True + except InvalidSignature: + return False diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/signature.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/signature.py new file mode 100644 index 0000000..bfb87fe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/signature.py @@ -0,0 +1,386 @@ +""" + authlib.oauth1.rfc5849.signature + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of `section 3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 +""" +import binascii +import hashlib +import hmac +from authlib.common.urls import urlparse +from authlib.common.encoding import to_unicode, to_bytes +from .util import escape, unescape + +SIGNATURE_HMAC_SHA1 = "HMAC-SHA1" +SIGNATURE_RSA_SHA1 = "RSA-SHA1" +SIGNATURE_PLAINTEXT = "PLAINTEXT" + +SIGNATURE_TYPE_HEADER = 'HEADER' +SIGNATURE_TYPE_QUERY = 'QUERY' +SIGNATURE_TYPE_BODY = 'BODY' + + +def construct_base_string(method, uri, params, host=None): + """Generate signature base string from request, per `Section 3.4.1`_. + + For example, the HTTP request:: + + POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1 + Host: example.com + Content-Type: application/x-www-form-urlencoded + Authorization: OAuth realm="Example", + oauth_consumer_key="9djdj82h48djs9d2", + oauth_token="kkk9d7dh3k39sjv7", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="7d8f3e4a", + oauth_signature="bYT5CMsGcbgUdFHObYMEfcx6bsw%3D" + + c2&a3=2+q + + is represented by the following signature base string (line breaks + are for display purposes only):: + + POST&http%3A%2F%2Fexample.com%2Frequest&a2%3Dr%2520b%26a3%3D2%2520q + %26a3%3Da%26b5%3D%253D%25253D%26c%2540%3D%26c2%3D%26oauth_consumer_ + key%3D9djdj82h48djs9d2%26oauth_nonce%3D7d8f3e4a%26oauth_signature_m + ethod%3DHMAC-SHA1%26oauth_timestamp%3D137131201%26oauth_token%3Dkkk + 9d7dh3k39sjv7 + + .. 
_`Section 3.4.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1 + """ + + # Create base string URI per Section 3.4.1.2 + base_string_uri = normalize_base_string_uri(uri, host) + + # Cleanup parameter sources per Section 3.4.1.3.1 + unescaped_params = [] + for k, v in params: + # The "oauth_signature" parameter MUST be excluded from the signature + if k in ('oauth_signature', 'realm'): + continue + + # ensure oauth params are unescaped + if k.startswith('oauth_'): + v = unescape(v) + unescaped_params.append((k, v)) + + # Normalize parameters per Section 3.4.1.3.2 + normalized_params = normalize_parameters(unescaped_params) + + # construct base string + return '&'.join([ + escape(method.upper()), + escape(base_string_uri), + escape(normalized_params), + ]) + + +def normalize_base_string_uri(uri, host=None): + """Normalize Base String URI per `Section 3.4.1.2`_. + + For example, the HTTP request:: + + GET /r%20v/X?id=123 HTTP/1.1 + Host: EXAMPLE.COM:80 + + is represented by the base string URI: "http://example.com/r%20v/X". + + In another example, the HTTPS request:: + + GET /?q=1 HTTP/1.1 + Host: www.example.net:8080 + + is represented by the base string URI: "https://www.example.net:8080/". + + .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + + The host argument overrides the netloc part of the uri argument. + """ + uri = to_unicode(uri) + scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri) + + # The scheme, authority, and path of the request resource URI `RFC3986` + # are included by constructing an "http" or "https" URI representing + # the request resource (without the query or fragment) as follows: + # + # .. _`RFC3986`: https://tools.ietf.org/html/rfc3986 + + if not scheme or not netloc: + raise ValueError('uri must include a scheme and netloc') + + # Per `RFC 2616 section 5.1.2`_: + # + # Note that the absolute path cannot be empty; if none is present in + # the original URI, it MUST be given as "/" (the server root). + # + # .. _`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2 + if not path: + path = '/' + + # 1. The scheme and host MUST be in lowercase. + scheme = scheme.lower() + netloc = netloc.lower() + + # 2. The host and port values MUST match the content of the HTTP + # request "Host" header field. + if host is not None: + netloc = host.lower() + + # 3. The port MUST be included if it is not the default port for the + # scheme, and MUST be excluded if it is the default. Specifically, + # the port MUST be excluded when making an HTTP request `RFC2616`_ + # to port 80 or when making an HTTPS request `RFC2818`_ to port 443. + # All other non-default port numbers MUST be included. + # + # .. _`RFC2616`: https://tools.ietf.org/html/rfc2616 + # .. _`RFC2818`: https://tools.ietf.org/html/rfc2818 + default_ports = ( + ('http', '80'), + ('https', '443'), + ) + if ':' in netloc: + host, port = netloc.split(':', 1) + if (scheme, port) in default_ports: + netloc = host + + return urlparse.urlunparse((scheme, netloc, path, params, '', '')) + + +def normalize_parameters(params): + """Normalize parameters per `Section 3.4.1.3.2`_. 
+ + For example, the list of parameters from the previous section would + be normalized as follows: + + Encoded:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | b5 | %3D%253D | + | a3 | a | + | c%40 | | + | a2 | r%20b | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_token | kkk9d7dh3k39sjv7 | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_nonce | 7d8f3e4a | + | c2 | | + | a3 | 2%20q | + +------------------------+------------------+ + + Sorted:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | a2 | r%20b | + | a3 | 2%20q | + | a3 | a | + | b5 | %3D%253D | + | c%40 | | + | c2 | | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_nonce | 7d8f3e4a | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_token | kkk9d7dh3k39sjv7 | + +------------------------+------------------+ + + Concatenated Pairs:: + + +-------------------------------------+ + | Name=Value | + +-------------------------------------+ + | a2=r%20b | + | a3=2%20q | + | a3=a | + | b5=%3D%253D | + | c%40= | + | c2= | + | oauth_consumer_key=9djdj82h48djs9d2 | + | oauth_nonce=7d8f3e4a | + | oauth_signature_method=HMAC-SHA1 | + | oauth_timestamp=137131201 | + | oauth_token=kkk9d7dh3k39sjv7 | + +-------------------------------------+ + + and concatenated together into a single string (line breaks are for + display purposes only):: + + a2=r%20b&a3=2%20q&a3=a&b5=%3D%253D&c%40=&c2=&oauth_consumer_key=9dj + dj82h48djs9d2&oauth_nonce=7d8f3e4a&oauth_signature_method=HMAC-SHA1 + &oauth_timestamp=137131201&oauth_token=kkk9d7dh3k39sjv7 + + .. _`Section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + """ + + # 1. First, the name and value of each parameter are encoded + # (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key_values = [(escape(k), escape(v)) for k, v in params] + + # 2. The parameters are sorted by name, using ascending byte value + # ordering. If two or more parameters share the same name, they + # are sorted by their value. + key_values.sort() + + # 3. The name of each parameter is concatenated to its corresponding + # value using an "=" character (ASCII code 61) as a separator, even + # if the value is empty. + parameter_parts = [f'{k}={v}' for k, v in key_values] + + # 4. The sorted name/value pairs are concatenated together into a + # single string by using an "&" character (ASCII code 38) as + # separator. + return '&'.join(parameter_parts) + + +def generate_signature_base_string(request): + """Generate signature base string from request.""" + host = request.headers.get('Host', None) + return construct_base_string( + request.method, request.uri, request.params, host) + + +def hmac_sha1_signature(base_string, client_secret, token_secret): + """Generate signature via HMAC-SHA1 method, per `Section 3.4.2`_. + + The "HMAC-SHA1" signature method uses the HMAC-SHA1 signature + algorithm as defined in `RFC2104`_:: + + digest = HMAC-SHA1 (key, text) + + .. _`RFC2104`: https://tools.ietf.org/html/rfc2104 + .. _`Section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2 + """ + + # The HMAC-SHA1 function variables are used in following way: + + # text is set to the value of the signature base string from + # `Section 3.4.1.1`_. + # + # .. 
_`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1 + text = base_string + + # key is set to the concatenated values of: + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key = escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included + # even when either secret is empty. + key += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key += escape(token_secret or '') + + signature = hmac.new(to_bytes(key), to_bytes(text), hashlib.sha1) + + # digest is used to set the value of the "oauth_signature" protocol + # parameter, after the result octet string is base64-encoded + # per `RFC2045, Section 6.8`. + # + # .. _`RFC2045, Section 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8 + sig = binascii.b2a_base64(signature.digest())[:-1] + return to_unicode(sig) + + +def rsa_sha1_signature(base_string, rsa_private_key): + """Generate signature via RSA-SHA1 method, per `Section 3.4.3`_. + + The "RSA-SHA1" signature method uses the RSASSA-PKCS1-v1_5 signature + algorithm as defined in `RFC3447, Section 8.2`_ (also known as + PKCS#1), using SHA-1 as the hash function for EMSA-PKCS1-v1_5. To + use this method, the client MUST have established client credentials + with the server that included its RSA public key (in a manner that is + beyond the scope of this specification). + + .. _`Section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3 + .. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2 + """ + from .rsa import sign_sha1 + base_string = to_bytes(base_string) + s = sign_sha1(to_bytes(base_string), rsa_private_key) + sig = binascii.b2a_base64(s)[:-1] + return to_unicode(sig) + + +def plaintext_signature(client_secret, token_secret): + """Generate signature via PLAINTEXT method, per `Section 3.4.4`_. + + The "PLAINTEXT" method does not employ a signature algorithm. It + MUST be used with a transport-layer mechanism such as TLS or SSL (or + sent over a secure channel with equivalent protections). It does not + utilize the signature base string or the "oauth_timestamp" and + "oauth_nonce" parameters. + + .. _`Section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4 + """ + + # The "oauth_signature" protocol parameter is set to the concatenated + # value of: + + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature = escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included even + # when either secret is empty. + signature += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. 
_`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature += escape(token_secret or '') + + return signature + + +def sign_hmac_sha1(client, request): + """Sign a HMAC-SHA1 signature.""" + base_string = generate_signature_base_string(request) + return hmac_sha1_signature( + base_string, client.client_secret, client.token_secret) + + +def sign_rsa_sha1(client, request): + """Sign a RSASSA-PKCS #1 v1.5 base64 encoded signature.""" + base_string = generate_signature_base_string(request) + return rsa_sha1_signature(base_string, client.rsa_key) + + +def sign_plaintext(client, request): + """Sign a PLAINTEXT signature.""" + return plaintext_signature(client.client_secret, client.token_secret) + + +def verify_hmac_sha1(request): + """Verify a HMAC-SHA1 signature.""" + base_string = generate_signature_base_string(request) + sig = hmac_sha1_signature( + base_string, request.client_secret, request.token_secret) + return hmac.compare_digest(sig, request.signature) + + +def verify_rsa_sha1(request): + """Verify a RSASSA-PKCS #1 v1.5 base64 encoded signature.""" + from .rsa import verify_sha1 + base_string = generate_signature_base_string(request) + sig = binascii.a2b_base64(to_bytes(request.signature)) + return verify_sha1(sig, to_bytes(base_string), request.rsa_public_key) + + +def verify_plaintext(request): + """Verify a PLAINTEXT signature.""" + sig = plaintext_signature(request.client_secret, request.token_secret) + return hmac.compare_digest(sig, request.signature) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/util.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/util.py new file mode 100644 index 0000000..9383e22 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/util.py @@ -0,0 +1,9 @@ +from authlib.common.urls import quote, unquote + + +def escape(s): + return quote(s, safe=b'~') + + +def unescape(s): + return unquote(s) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/wrapper.py b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/wrapper.py new file mode 100644 index 0000000..c03687e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth1/rfc5849/wrapper.py @@ -0,0 +1,129 @@ +from urllib.request import parse_keqv_list, parse_http_list +from authlib.common.urls import ( + urlparse, extract_params, url_decode, +) +from .signature import ( + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_HEADER +) +from .errors import ( + InsecureTransportError, + DuplicatedOAuthProtocolParameterError +) +from .util import unescape + + +class OAuth1Request: + def __init__(self, method, uri, body=None, headers=None): + InsecureTransportError.check(uri) + self.method = method + self.uri = uri + self.body = body + self.headers = headers or {} + + # states namespaces + self.client = None + self.credential = None + self.user = None + + self.query = urlparse.urlparse(uri).query + self.query_params = url_decode(self.query) + self.body_params = extract_params(body) or [] + + self.auth_params, self.realm = _parse_authorization_header(headers) + self.signature_type, self.oauth_params = _parse_oauth_params( + self.query_params, self.body_params, self.auth_params) + + params = [] + params.extend(self.query_params) + params.extend(self.body_params) + params.extend(self.auth_params) + self.params = params + + @property + def client_id(self): + return self.oauth_params.get('oauth_consumer_key') + + @property + def client_secret(self): + if self.client: + return 
self.client.get_client_secret() + + @property + def rsa_public_key(self): + if self.client: + return self.client.get_rsa_public_key() + + @property + def timestamp(self): + return self.oauth_params.get('oauth_timestamp') + + @property + def redirect_uri(self): + return self.oauth_params.get('oauth_callback') + + @property + def signature(self): + return self.oauth_params.get('oauth_signature') + + @property + def signature_method(self): + return self.oauth_params.get('oauth_signature_method') + + @property + def token(self): + return self.oauth_params.get('oauth_token') + + @property + def token_secret(self): + if self.credential: + return self.credential.get_oauth_token_secret() + + +def _filter_oauth(params): + for k, v in params: + if k.startswith('oauth_'): + yield (k, v) + + +def _parse_authorization_header(headers): + """Parse an OAuth authorization header into a list of 2-tuples""" + authorization_header = headers.get('Authorization') + if not authorization_header: + return [], None + + auth_scheme = 'oauth ' + if authorization_header.lower().startswith(auth_scheme): + items = parse_http_list(authorization_header[len(auth_scheme):]) + try: + items = parse_keqv_list(items).items() + auth_params = [(unescape(k), unescape(v)) for k, v in items] + realm = dict(auth_params).get('realm') + return auth_params, realm + except (IndexError, ValueError): + pass + raise ValueError('Malformed authorization header') + + +def _parse_oauth_params(query_params, body_params, auth_params): + oauth_params_set = [ + (SIGNATURE_TYPE_QUERY, list(_filter_oauth(query_params))), + (SIGNATURE_TYPE_BODY, list(_filter_oauth(body_params))), + (SIGNATURE_TYPE_HEADER, list(_filter_oauth(auth_params))) + ] + oauth_params_set = [params for params in oauth_params_set if params[1]] + if len(oauth_params_set) > 1: + found_types = [p[0] for p in oauth_params_set] + raise DuplicatedOAuthProtocolParameterError( + '"oauth_" params must come from only 1 signature type ' + 'but were found in {}'.format(','.join(found_types)) + ) + + if oauth_params_set: + signature_type = oauth_params_set[0][0] + oauth_params = dict(oauth_params_set[0][1]) + else: + signature_type = None + oauth_params = {} + return signature_type, oauth_params diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/__init__.py new file mode 100644 index 0000000..05fdf30 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/__init__.py @@ -0,0 +1,16 @@ +from .base import OAuth2Error +from .auth import ClientAuth, TokenAuth +from .client import OAuth2Client +from .rfc6749 import ( + OAuth2Request, + JsonRequest, + AuthorizationServer, + ClientAuthentication, + ResourceProtector, +) + +__all__ = [ + 'OAuth2Error', 'ClientAuth', 'TokenAuth', 'OAuth2Client', + 'OAuth2Request', 'JsonRequest', 'AuthorizationServer', + 'ClientAuthentication', 'ResourceProtector', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e270c32 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/auth.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..d4e9fff Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/auth.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..ce0be6b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..1b71576 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/__pycache__/client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/auth.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/auth.py new file mode 100644 index 0000000..c87241a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/auth.py @@ -0,0 +1,105 @@ +import base64 +from authlib.common.urls import add_params_to_qs, add_params_to_uri +from authlib.common.encoding import to_bytes, to_native +from .rfc6749 import OAuth2Token +from .rfc6750 import add_bearer_token + + +def encode_client_secret_basic(client, method, uri, headers, body): + text = f'{client.client_id}:{client.client_secret}' + auth = to_native(base64.b64encode(to_bytes(text, 'latin1'))) + headers['Authorization'] = f'Basic {auth}' + return uri, headers, body + + +def encode_client_secret_post(client, method, uri, headers, body): + body = add_params_to_qs(body or '', [ + ('client_id', client.client_id), + ('client_secret', client.client_secret or '') + ]) + if 'Content-Length' in headers: + headers['Content-Length'] = str(len(body)) + return uri, headers, body + + +def encode_none(client, method, uri, headers, body): + if method == 'GET': + uri = add_params_to_uri(uri, [('client_id', client.client_id)]) + return uri, headers, body + body = add_params_to_qs(body, [('client_id', client.client_id)]) + if 'Content-Length' in headers: + headers['Content-Length'] = str(len(body)) + return uri, headers, body + + +class ClientAuth: + """Attaches OAuth Client Information to HTTP requests. + + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param auth_method: Client auth method for token endpoint. The supported + methods for now: + + * client_secret_basic (default) + * client_secret_post + * none + """ + DEFAULT_AUTH_METHODS = { + 'client_secret_basic': encode_client_secret_basic, + 'client_secret_post': encode_client_secret_post, + 'none': encode_none, + } + + def __init__(self, client_id, client_secret, auth_method=None): + if auth_method is None: + auth_method = 'client_secret_basic' + + self.client_id = client_id + self.client_secret = client_secret + + if auth_method in self.DEFAULT_AUTH_METHODS: + auth_method = self.DEFAULT_AUTH_METHODS[auth_method] + + self.auth_method = auth_method + + def prepare(self, method, uri, headers, body): + return self.auth_method(self, method, uri, headers, body) + + +class TokenAuth: + """Attach token information to HTTP requests. 
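For orientation, a minimal sketch of how the ``ClientAuth`` class above could be used to sign a token request (endpoint URL and credentials are illustrative):

    auth = ClientAuth('my-client-id', 'my-secret', auth_method='client_secret_basic')
    uri, headers, body = auth.prepare(
        'POST', 'https://provider.example.com/oauth/token',
        {}, 'grant_type=client_credentials')
    # headers now carries an Authorization: Basic ... header built from
    # base64("my-client-id:my-secret").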
+ + :param token: A dict or OAuth2Token instance of an OAuth 2.0 token + :param token_placement: The placement of the token, default is ``header``, + available choices: + + * header (default) + * body + * uri + """ + DEFAULT_TOKEN_TYPE = 'bearer' + SIGN_METHODS = { + 'bearer': add_bearer_token + } + + def __init__(self, token, token_placement='header', client=None): + self.token = OAuth2Token.from_dict(token) + self.token_placement = token_placement + self.client = client + self.hooks = set() + + def set_token(self, token): + self.token = OAuth2Token.from_dict(token) + + def prepare(self, uri, headers, body): + token_type = self.token.get('token_type', self.DEFAULT_TOKEN_TYPE) + sign = self.SIGN_METHODS[token_type.lower()] + uri, headers, body = sign( + self.token['access_token'], + uri, headers, body, + self.token_placement) + + for hook in self.hooks: + uri, headers, body = hook(uri, headers, body) + + return uri, headers, body diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/base.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/base.py new file mode 100644 index 0000000..9bcb15f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/base.py @@ -0,0 +1,26 @@ +from authlib.common.errors import AuthlibHTTPError +from authlib.common.urls import add_params_to_uri + + +class OAuth2Error(AuthlibHTTPError): + def __init__(self, description=None, uri=None, + status_code=None, state=None, + redirect_uri=None, redirect_fragment=False, error=None): + super().__init__(error, description, uri, status_code) + self.state = state + self.redirect_uri = redirect_uri + self.redirect_fragment = redirect_fragment + + def get_body(self): + """Get a list of body.""" + error = super().get_body() + if self.state: + error.append(('state', self.state)) + return error + + def __call__(self, uri=None): + if self.redirect_uri: + params = self.get_body() + loc = add_params_to_uri(self.redirect_uri, params, self.redirect_fragment) + return 302, '', [('Location', loc)] + return super().__call__(uri=uri) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/client.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/client.py new file mode 100644 index 0000000..d36d93f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/client.py @@ -0,0 +1,449 @@ +from authlib.common.security import generate_token +from authlib.common.urls import url_decode +from .rfc6749.parameters import ( + prepare_grant_uri, + prepare_token_request, + parse_authorization_code_response, + parse_implicit_response, +) +from .rfc7009 import prepare_revoke_token_request +from .rfc7636 import create_s256_code_challenge +from .auth import TokenAuth, ClientAuth +from .base import OAuth2Error + +DEFAULT_HEADERS = { + 'Accept': 'application/json', + 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' +} + + +class OAuth2Client: + """Construct a new OAuth 2 protocol client. + + :param session: Requests session object to communicate with + authorization server. + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param token_endpoint_auth_method: client authentication method for + token endpoint. + :param revocation_endpoint_auth_method: client authentication method for + revocation endpoint. + :param scope: Scope that you needed to access user resources. + :param state: Shared secret to prevent CSRF attack. + :param redirect_uri: Redirect URI you registered as callback. 
+    :param code_challenge_method: PKCE method name, only S256 is supported.
+    :param token: A dict of token attributes such as ``access_token``,
+        ``token_type`` and ``expires_at``.
+    :param token_placement: The place to put token in HTTP request. Available
+        values: "header", "body", "uri".
+    :param update_token: A function for you to update token. It accepts an
+        :class:`OAuth2Token` as parameter.
+    :param leeway: Time window, in seconds, before the actual expiration of
+        the authentication token within which the token is considered expired
+        and will be refreshed.
+    """
+    client_auth_class = ClientAuth
+    token_auth_class = TokenAuth
+    oauth_error_class = OAuth2Error
+
+    EXTRA_AUTHORIZE_PARAMS = (
+        'response_mode', 'nonce', 'prompt', 'login_hint'
+    )
+    SESSION_REQUEST_PARAMS = []
+
+    def __init__(self, session, client_id=None, client_secret=None,
+                 token_endpoint_auth_method=None,
+                 revocation_endpoint_auth_method=None,
+                 scope=None, state=None, redirect_uri=None, code_challenge_method=None,
+                 token=None, token_placement='header', update_token=None, leeway=60,
+                 **metadata):
+
+        self.session = session
+        self.client_id = client_id
+        self.client_secret = client_secret
+        self.state = state
+
+        if token_endpoint_auth_method is None:
+            if client_secret:
+                token_endpoint_auth_method = 'client_secret_basic'
+            else:
+                token_endpoint_auth_method = 'none'
+
+        self.token_endpoint_auth_method = token_endpoint_auth_method
+
+        if revocation_endpoint_auth_method is None:
+            if client_secret:
+                revocation_endpoint_auth_method = 'client_secret_basic'
+            else:
+                revocation_endpoint_auth_method = 'none'
+
+        self.revocation_endpoint_auth_method = revocation_endpoint_auth_method
+
+        self.scope = scope
+        self.redirect_uri = redirect_uri
+        self.code_challenge_method = code_challenge_method
+
+        self.token_auth = self.token_auth_class(token, token_placement, self)
+        self.update_token = update_token
+
+        token_updater = metadata.pop('token_updater', None)
+        if token_updater:
+            raise ValueError('update token has been redesigned, check out the documentation')
+
+        self.metadata = metadata
+
+        self.compliance_hook = {
+            'access_token_response': set(),
+            'refresh_token_request': set(),
+            'refresh_token_response': set(),
+            'revoke_token_request': set(),
+            'introspect_token_request': set(),
+        }
+        self._auth_methods = {}
+
+        self.leeway = leeway
+
+    def register_client_auth_method(self, auth):
+        """Extend client authentication methods for the token endpoint.
+
+        :param auth: an instance to sign the request
+        """
+        if isinstance(auth, tuple):
+            self._auth_methods[auth[0]] = auth[1]
+        else:
+            self._auth_methods[auth.name] = auth
+
+    def client_auth(self, auth_method):
+        if isinstance(auth_method, str) and auth_method in self._auth_methods:
+            auth_method = self._auth_methods[auth_method]
+        return self.client_auth_class(
+            client_id=self.client_id,
+            client_secret=self.client_secret,
+            auth_method=auth_method,
+        )
+
+    @property
+    def token(self):
+        return self.token_auth.token
+
+    @token.setter
+    def token(self, token):
+        self.token_auth.set_token(token)
+
+    def create_authorization_url(self, url, state=None, code_verifier=None, **kwargs):
+        """Generate an authorization URL and state.
+
+        :param url: Authorization endpoint url, must be HTTPS.
+        :param state: An optional state string for CSRF protection. If not
+            given it will be generated for you.
+        :param code_verifier: An optional code_verifier for code challenge.
+        :param kwargs: Extra parameters to include.
+        :return: authorization_url, state
+        """
+        if state is None:
+            state = generate_token()
+
+        response_type = self.metadata.get('response_type', 'code')
+        response_type = kwargs.pop('response_type', response_type)
+        if 'redirect_uri' not in kwargs:
+            kwargs['redirect_uri'] = self.redirect_uri
+        if 'scope' not in kwargs:
+            kwargs['scope'] = self.scope
+
+        if code_verifier and response_type == 'code' and self.code_challenge_method == 'S256':
+            kwargs['code_challenge'] = create_s256_code_challenge(code_verifier)
+            kwargs['code_challenge_method'] = self.code_challenge_method
+
+        for k in self.EXTRA_AUTHORIZE_PARAMS:
+            if k not in kwargs and k in self.metadata:
+                kwargs[k] = self.metadata[k]
+
+        uri = prepare_grant_uri(
+            url, client_id=self.client_id, response_type=response_type,
+            state=state, **kwargs)
+        return uri, state
+
+    def fetch_token(self, url=None, body='', method='POST', headers=None,
+                    auth=None, grant_type=None, state=None, **kwargs):
+        """Generic method for fetching an access token from the token endpoint.
+
+        :param url: Access Token endpoint URL; if not configured,
+            ``authorization_response`` is used to extract token from
+            its fragment (implicit way).
+        :param body: Optional application/x-www-form-urlencoded body to
+            include in the token request. Prefer kwargs over body.
+        :param method: The HTTP method used to make the request. Defaults
+            to POST, but may also be GET. Other methods should
+            be added as needed.
+        :param headers: Dict to default request headers with.
+        :param auth: An auth tuple or method as accepted by requests.
+        :param grant_type: Use the specified grant_type to fetch the token.
+        :return: A :class:`OAuth2Token` object (a dict too).
+        """
+        state = state or self.state
+        # implicit grant_type
+        authorization_response = kwargs.pop('authorization_response', None)
+        if authorization_response and '#' in authorization_response:
+            return self.token_from_fragment(authorization_response, state)
+
+        session_kwargs = self._extract_session_request_params(kwargs)
+
+        if authorization_response and 'code=' in authorization_response:
+            grant_type = 'authorization_code'
+            params = parse_authorization_code_response(
+                authorization_response,
+                state=state,
+            )
+            kwargs['code'] = params['code']
+
+        if grant_type is None:
+            grant_type = self.metadata.get('grant_type')
+
+        if grant_type is None:
+            grant_type = _guess_grant_type(kwargs)
+        self.metadata['grant_type'] = grant_type
+
+        body = self._prepare_token_endpoint_body(body, grant_type, **kwargs)
+
+        if auth is None:
+            auth = self.client_auth(self.token_endpoint_auth_method)
+
+        if headers is None:
+            headers = DEFAULT_HEADERS
+
+        if url is None:
+            url = self.metadata.get('token_endpoint')
+
+        return self._fetch_token(
+            url, body=body, auth=auth, method=method,
+            headers=headers, **session_kwargs
+        )
+
+    def token_from_fragment(self, authorization_response, state=None):
+        token = parse_implicit_response(authorization_response, state)
+        if 'error' in token:
+            raise self.oauth_error_class(
+                error=token['error'],
+                description=token.get('error_description')
+            )
+        self.token = token
+        return token
+
+    def refresh_token(self, url=None, refresh_token=None, body='',
+                      auth=None, headers=None, **kwargs):
+        """Fetch a new access token using a refresh token.
+
+        :param url: Refresh Token endpoint, must be HTTPS.
+        :param refresh_token: The refresh_token to use.
+        :param body: Optional application/x-www-form-urlencoded body to
+            include in the token request. Prefer kwargs over body.
+        :param auth: An auth tuple or method as accepted by requests.
+        :param headers: Dict to default request headers with.
+        :return: A :class:`OAuth2Token` object (a dict too).
+        """
+        session_kwargs = self._extract_session_request_params(kwargs)
+        refresh_token = refresh_token or self.token.get('refresh_token')
+        if 'scope' not in kwargs and self.scope:
+            kwargs['scope'] = self.scope
+        body = prepare_token_request(
+            'refresh_token', body,
+            refresh_token=refresh_token, **kwargs
+        )
+
+        if headers is None:
+            headers = DEFAULT_HEADERS.copy()
+
+        if url is None:
+            url = self.metadata.get('token_endpoint')
+
+        for hook in self.compliance_hook['refresh_token_request']:
+            url, headers, body = hook(url, headers, body)
+
+        if auth is None:
+            auth = self.client_auth(self.token_endpoint_auth_method)
+
+        return self._refresh_token(
+            url, refresh_token=refresh_token, body=body, headers=headers,
+            auth=auth, **session_kwargs)
+
+    def ensure_active_token(self, token=None):
+        if token is None:
+            token = self.token
+        if not token.is_expired(leeway=self.leeway):
+            return True
+        refresh_token = token.get('refresh_token')
+        url = self.metadata.get('token_endpoint')
+        if refresh_token and url:
+            self.refresh_token(url, refresh_token=refresh_token)
+            return True
+        elif self.metadata.get('grant_type') == 'client_credentials':
+            access_token = token['access_token']
+            new_token = self.fetch_token(url, grant_type='client_credentials')
+            if self.update_token:
+                self.update_token(new_token, access_token=access_token)
+            return True
+
+    def revoke_token(self, url, token=None, token_type_hint=None,
+                     body=None, auth=None, headers=None, **kwargs):
+        """Revoke token method defined via `RFC7009`_.
+
+        :param url: Revoke Token endpoint, must be HTTPS.
+        :param token: The token to be revoked.
+        :param token_type_hint: The type of the token that is to be revoked.
+            It can be "access_token" or "refresh_token".
+        :param body: Optional application/x-www-form-urlencoded body to
+            include in the token request. Prefer kwargs over body.
+        :param auth: An auth tuple or method as accepted by requests.
+        :param headers: Dict to default request headers with.
+        :return: Revocation Response
+
+        .. _`RFC7009`: https://tools.ietf.org/html/rfc7009
+        """
+        return self._handle_token_hint(
+            'revoke_token_request', url,
+            token=token, token_type_hint=token_type_hint,
+            body=body, auth=auth, headers=headers, **kwargs)
+
+    def introspect_token(self, url, token=None, token_type_hint=None,
+                         body=None, auth=None, headers=None, **kwargs):
+        """Implementation of OAuth 2.0 Token Introspection defined via `RFC7662`_.
+
+        :param url: Introspection Endpoint, must be HTTPS.
+        :param token: The token to be introspected.
+        :param token_type_hint: The type of the token that is to be introspected.
+            It can be "access_token" or "refresh_token".
+        :param body: Optional application/x-www-form-urlencoded body to
+            include in the token request. Prefer kwargs over body.
+        :param auth: An auth tuple or method as accepted by requests.
+        :param headers: Dict to default request headers with.
+        :return: Introspection Response
+
+        .. _`RFC7662`: https://tools.ietf.org/html/rfc7662
+        """
+        return self._handle_token_hint(
+            'introspect_token_request', url,
+            token=token, token_type_hint=token_type_hint,
+            body=body, auth=auth, headers=headers, **kwargs)
+
+    def register_compliance_hook(self, hook_type, hook):
+        """Register a hook for request/response tweaking.
+
+        Available hooks are:
+
+        * access_token_response: invoked before token parsing.
+        * refresh_token_request: invoked before refreshing token.
+        * refresh_token_response: invoked before refresh token parsing.
+        * protected_request: invoked before making a request.
+        * revoke_token_request: invoked before revoking a token.
+        * introspect_token_request: invoked before introspecting a token.
+        """
+        if hook_type == 'protected_request':
+            self.token_auth.hooks.add(hook)
+            return
+
+        if hook_type not in self.compliance_hook:
+            raise ValueError('Hook type %s is not in %s.' %
+                             (hook_type, self.compliance_hook))
+        self.compliance_hook[hook_type].add(hook)
+
+    def parse_response_token(self, resp):
+        if resp.status_code >= 500:
+            resp.raise_for_status()
+
+        token = resp.json()
+        if 'error' in token:
+            raise self.oauth_error_class(
+                error=token['error'],
+                description=token.get('error_description')
+            )
+        self.token = token
+        return self.token
+
+    def _fetch_token(self, url, body='', headers=None, auth=None,
+                     method='POST', **kwargs):
+
+        if method.upper() == 'POST':
+            resp = self.session.post(
+                url, data=dict(url_decode(body)),
+                headers=headers, auth=auth, **kwargs)
+        else:
+            if '?' in url:
+                url = '&'.join([url, body])
+            else:
+                url = '?'.join([url, body])
+            resp = self.session.request(method, url, headers=headers, auth=auth, **kwargs)
+
+        for hook in self.compliance_hook['access_token_response']:
+            resp = hook(resp)
+
+        return self.parse_response_token(resp)
+
+    def _refresh_token(self, url, refresh_token=None, body='', headers=None,
+                       auth=None, **kwargs):
+        resp = self._http_post(url, body=body, auth=auth, headers=headers, **kwargs)
+
+        for hook in self.compliance_hook['refresh_token_response']:
+            resp = hook(resp)
+
+        token = self.parse_response_token(resp)
+        if 'refresh_token' not in token:
+            self.token['refresh_token'] = refresh_token
+
+        if callable(self.update_token):
+            self.update_token(self.token, refresh_token=refresh_token)
+
+        return self.token
+
+    def _handle_token_hint(self, hook, url, token=None, token_type_hint=None,
+                           body=None, auth=None, headers=None, **kwargs):
+        if token is None and self.token:
+            token = self.token.get('refresh_token') or self.token.get('access_token')
+
+        if body is None:
+            body = ''
+
+        body, headers = prepare_revoke_token_request(
+            token, token_type_hint, body, headers)
+
+        # do not shadow the ``hook`` parameter with the loop variable
+        for func in self.compliance_hook[hook]:
+            url, headers, body = func(url, headers, body)
+
+        if auth is None:
+            auth = self.client_auth(self.revocation_endpoint_auth_method)
+
+        session_kwargs = self._extract_session_request_params(kwargs)
+        return self._http_post(
+            url, body, auth=auth, headers=headers, **session_kwargs)
+
+    def _prepare_token_endpoint_body(self, body, grant_type, **kwargs):
+        if grant_type == 'authorization_code':
+            if 'redirect_uri' not in kwargs:
+                kwargs['redirect_uri'] = self.redirect_uri
+            return prepare_token_request(grant_type, body, **kwargs)
+
+        if 'scope' not in kwargs and self.scope:
+            kwargs['scope'] = self.scope
+        return prepare_token_request(grant_type, body, **kwargs)
+
+    def _extract_session_request_params(self, kwargs):
+        """Extract parameters for session object from the passing ``**kwargs``."""
+        rv = {}
+        for k in self.SESSION_REQUEST_PARAMS:
+            if k in kwargs:
+                rv[k] = kwargs.pop(k)
+        return rv
+
+    def _http_post(self, url, body=None, auth=None, headers=None, **kwargs):
+        return self.session.post(
+            url, data=dict(url_decode(body)),
+            headers=headers, auth=auth, **kwargs)
+
+
+def _guess_grant_type(kwargs):
+    if 'code' in kwargs:
+        grant_type = 'authorization_code'
+    elif 'username' in kwargs and 'password' in kwargs:
grant_type = 'password' + else: + grant_type = 'client_credentials' + return grant_type diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__init__.py new file mode 100644 index 0000000..e1748e3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__init__.py @@ -0,0 +1,83 @@ +""" + authlib.oauth2.rfc6749 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + The OAuth 2.0 Authorization Framework. + + https://tools.ietf.org/html/rfc6749 +""" + +from .requests import OAuth2Request, JsonRequest +from .wrappers import OAuth2Token +from .errors import ( + OAuth2Error, + AccessDeniedError, + MissingAuthorizationError, + InvalidGrantError, + InvalidClientError, + InvalidRequestError, + InvalidScopeError, + InsecureTransportError, + UnauthorizedClientError, + UnsupportedResponseTypeError, + UnsupportedGrantTypeError, + UnsupportedTokenTypeError, + # exceptions for clients + MissingCodeException, + MissingTokenException, + MissingTokenTypeException, + MismatchingStateException, +) +from .models import ClientMixin, AuthorizationCodeMixin, TokenMixin +from .authenticate_client import ClientAuthentication +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector, TokenValidator +from .token_endpoint import TokenEndpoint +from .grants import ( + BaseGrant, + AuthorizationEndpointMixin, + TokenEndpointMixin, + AuthorizationCodeGrant, + ImplicitGrant, + ResourceOwnerPasswordCredentialsGrant, + ClientCredentialsGrant, + RefreshTokenGrant, +) +from .util import scope_to_list, list_to_scope + +__all__ = [ + 'OAuth2Token', + 'OAuth2Request', 'JsonRequest', + 'OAuth2Error', + 'AccessDeniedError', + 'MissingAuthorizationError', + 'InvalidGrantError', + 'InvalidClientError', + 'InvalidRequestError', + 'InvalidScopeError', + 'InsecureTransportError', + 'UnauthorizedClientError', + 'UnsupportedResponseTypeError', + 'UnsupportedGrantTypeError', + 'UnsupportedTokenTypeError', + 'MissingCodeException', + 'MissingTokenException', + 'MissingTokenTypeException', + 'MismatchingStateException', + 'ClientMixin', 'AuthorizationCodeMixin', 'TokenMixin', + 'ClientAuthentication', + 'AuthorizationServer', + 'ResourceProtector', + 'TokenValidator', + 'TokenEndpoint', + 'BaseGrant', + 'AuthorizationEndpointMixin', + 'TokenEndpointMixin', + 'AuthorizationCodeGrant', + 'ImplicitGrant', + 'ResourceOwnerPasswordCredentialsGrant', + 'ClientCredentialsGrant', + 'RefreshTokenGrant', + 'scope_to_list', 'list_to_scope', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a9feb4a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authenticate_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authenticate_client.cpython-312.pyc new file mode 100644 index 0000000..b5015a6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authenticate_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authorization_server.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authorization_server.cpython-312.pyc new file mode 100644 index 0000000..6ab545f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/authorization_server.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..13fbbc1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..6350ca3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/parameters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/parameters.cpython-312.pyc new file mode 100644 index 0000000..61b53f1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/parameters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/requests.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/requests.cpython-312.pyc new file mode 100644 index 0000000..5ce1d0a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/requests.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/resource_protector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/resource_protector.cpython-312.pyc new file mode 100644 index 0000000..8a94142 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/resource_protector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/token_endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/token_endpoint.cpython-312.pyc new file mode 100644 index 0000000..32ebee2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/token_endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..bb589c6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/wrappers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/wrappers.cpython-312.pyc new file mode 100644 index 0000000..1ca7375 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/__pycache__/wrappers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authenticate_client.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authenticate_client.py new file 
mode 100644 index 0000000..adcfd25 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authenticate_client.py @@ -0,0 +1,103 @@ +""" + authlib.oauth2.rfc6749.authenticate_client + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Registry of client authentication methods, with 3 built-in methods: + + 1. client_secret_basic + 2. client_secret_post + 3. none + + The "client_secret_basic" method is used a lot in examples of `RFC6749`_, + but the concept of naming are introduced in `RFC7591`_. + + .. _`RFC6749`: https://tools.ietf.org/html/rfc6749 + .. _`RFC7591`: https://tools.ietf.org/html/rfc7591 +""" + +import logging +from .errors import InvalidClientError +from .util import extract_basic_authorization + +log = logging.getLogger(__name__) + +__all__ = ['ClientAuthentication'] + + +class ClientAuthentication: + def __init__(self, query_client): + self.query_client = query_client + self._methods = { + 'none': authenticate_none, + 'client_secret_basic': authenticate_client_secret_basic, + 'client_secret_post': authenticate_client_secret_post, + } + + def register(self, method, func): + self._methods[method] = func + + def authenticate(self, request, methods, endpoint): + for method in methods: + func = self._methods[method] + client = func(self.query_client, request) + if client and client.check_endpoint_auth_method(method, endpoint): + request.auth_method = method + return client + + if 'client_secret_basic' in methods: + raise InvalidClientError(state=request.state, status_code=401) + raise InvalidClientError(state=request.state) + + def __call__(self, request, methods, endpoint='token'): + return self.authenticate(request, methods, endpoint) + + +def authenticate_client_secret_basic(query_client, request): + """Authenticate client by ``client_secret_basic`` method. The client + uses HTTP Basic for authentication. + """ + client_id, client_secret = extract_basic_authorization(request.headers) + if client_id and client_secret: + client = _validate_client(query_client, client_id, request.state, 401) + if client.check_client_secret(client_secret): + log.debug(f'Authenticate {client_id} via "client_secret_basic" success') + return client + log.debug(f'Authenticate {client_id} via "client_secret_basic" failed') + + +def authenticate_client_secret_post(query_client, request): + """Authenticate client by ``client_secret_post`` method. The client + uses POST parameters for authentication. + """ + data = request.form + client_id = data.get('client_id') + client_secret = data.get('client_secret') + if client_id and client_secret: + client = _validate_client(query_client, client_id, request.state) + if client.check_client_secret(client_secret): + log.debug(f'Authenticate {client_id} via "client_secret_post" success') + return client + log.debug(f'Authenticate {client_id} via "client_secret_post" failed') + + +def authenticate_none(query_client, request): + """Authenticate public client by ``none`` method. The client + does not have a client secret. 
+ """ + client_id = request.client_id + if client_id and not request.data.get('client_secret'): + client = _validate_client(query_client, client_id, request.state) + log.debug(f'Authenticate {client_id} via "none" success') + return client + log.debug(f'Authenticate {client_id} via "none" failed') + + +def _validate_client(query_client, client_id, state=None, status_code=400): + if client_id is None: + raise InvalidClientError(state=state, status_code=status_code) + + client = query_client(client_id) + if not client: + raise InvalidClientError(state=state, status_code=status_code) + + return client diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authorization_server.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authorization_server.py new file mode 100644 index 0000000..55bc7e3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/authorization_server.py @@ -0,0 +1,302 @@ +from authlib.common.errors import ContinueIteration +from .authenticate_client import ClientAuthentication +from .requests import OAuth2Request, JsonRequest +from .errors import ( + OAuth2Error, + InvalidScopeError, + UnsupportedResponseTypeError, + UnsupportedGrantTypeError, +) +from .util import scope_to_list + + +class AuthorizationServer: + """Authorization server that handles Authorization Endpoint and Token + Endpoint. + + :param scopes_supported: A list of supported scopes by this authorization server. + """ + def __init__(self, scopes_supported=None): + self.scopes_supported = scopes_supported + self._token_generators = {} + self._client_auth = None + self._authorization_grants = [] + self._token_grants = [] + self._endpoints = {} + + def query_client(self, client_id): + """Query OAuth client by client_id. The client model class MUST + implement the methods described by + :class:`~authlib.oauth2.rfc6749.ClientMixin`. + """ + raise NotImplementedError() + + def save_token(self, token, request): + """Define function to save the generated token into database.""" + raise NotImplementedError() + + def generate_token(self, grant_type, client, user=None, scope=None, + expires_in=None, include_refresh_token=True): + """Generate the token dict. + + :param grant_type: current requested grant_type. + :param client: the client that making the request. + :param user: current authorized user. + :param expires_in: if provided, use this value as expires_in. + :param scope: current requested scope. + :param include_refresh_token: should refresh_token be included. + :return: Token dict + """ + # generator for a specified grant type + func = self._token_generators.get(grant_type) + if not func: + # default generator for all grant types + func = self._token_generators.get('default') + if not func: + raise RuntimeError('No configured token generator') + + return func( + grant_type=grant_type, client=client, user=user, scope=scope, + expires_in=expires_in, include_refresh_token=include_refresh_token) + + def register_token_generator(self, grant_type, func): + """Register a function as token generator for the given ``grant_type``. + Developers MUST register a default token generator with a special + ``grant_type=default``:: + + def generate_bearer_token(grant_type, client, user=None, scope=None, + expires_in=None, include_refresh_token=True): + token = {'token_type': 'Bearer', 'access_token': ...} + if include_refresh_token: + token['refresh_token'] = ... + ... 
+ return token + + authorization_server.register_token_generator('default', generate_bearer_token) + + If you register a generator for a certain grant type, that generator will only works + for the given grant type:: + + authorization_server.register_token_generator('client_credentials', generate_bearer_token) + + :param grant_type: string name of the grant type + :param func: a function to generate token + """ + self._token_generators[grant_type] = func + + def authenticate_client(self, request, methods, endpoint='token'): + """Authenticate client via HTTP request information with the given + methods, such as ``client_secret_basic``, ``client_secret_post``. + """ + if self._client_auth is None and self.query_client: + self._client_auth = ClientAuthentication(self.query_client) + return self._client_auth(request, methods, endpoint) + + def register_client_auth_method(self, method, func): + """Add more client auth method. The default methods are: + + * none: The client is a public client and does not have a client secret + * client_secret_post: The client uses the HTTP POST parameters + * client_secret_basic: The client uses HTTP Basic + + :param method: Name of the Auth method + :param func: Function to authenticate the client + + The auth method accept two parameters: ``query_client`` and ``request``, + an example for this method:: + + def authenticate_client_via_custom(query_client, request): + client_id = request.headers['X-Client-Id'] + client = query_client(client_id) + do_some_validation(client) + return client + + authorization_server.register_client_auth_method( + 'custom', authenticate_client_via_custom) + """ + if self._client_auth is None and self.query_client: + self._client_auth = ClientAuthentication(self.query_client) + + self._client_auth.register(method, func) + + def get_error_uri(self, request, error): + """Return a URI for the given error, framework may implement this method.""" + return None + + def send_signal(self, name, *args, **kwargs): + """Framework integration can re-implement this method to support + signal system. + """ + raise NotImplementedError() + + def create_oauth2_request(self, request) -> OAuth2Request: + """This method MUST be implemented in framework integrations. It is + used to create an OAuth2Request instance. + + :param request: the "request" instance in framework + :return: OAuth2Request instance + """ + raise NotImplementedError() + + def create_json_request(self, request) -> JsonRequest: + """This method MUST be implemented in framework integrations. It is + used to create an HttpRequest instance. + + :param request: the "request" instance in framework + :return: HttpRequest instance + """ + raise NotImplementedError() + + def handle_response(self, status, body, headers): + """Return HTTP response. Framework MUST implement this function.""" + raise NotImplementedError() + + def validate_requested_scope(self, scope, state=None): + """Validate if requested scope is supported by Authorization Server. + Developers CAN re-write this method to meet your needs. + """ + if scope and self.scopes_supported: + scopes = set(scope_to_list(scope)) + if not set(self.scopes_supported).issuperset(scopes): + raise InvalidScopeError(state=state) + + def register_grant(self, grant_cls, extensions=None): + """Register a grant class into the endpoint registry. 
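To make the ``register_token_generator`` contract concrete, here is a hedged sketch of a default bearer-token generator built on Authlib's ``generate_token`` helper; the 42/48-character lengths and the 3600-second lifetime are illustrative choices, not library defaults::

    from authlib.common.security import generate_token

    def generate_bearer_token(grant_type, client, user=None, scope=None,
                              expires_in=None, include_refresh_token=True):
        token = {
            'token_type': 'Bearer',
            'access_token': generate_token(42),
            'expires_in': expires_in or 3600,
        }
        if scope:
            token['scope'] = scope
        if include_refresh_token:
            token['refresh_token'] = generate_token(48)
        return token

    # authorization_server.register_token_generator('default', generate_bearer_token)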
Developers + can implement the grants in ``authlib.oauth2.rfc6749.grants`` and + register with this method:: + + class AuthorizationCodeGrant(grants.AuthorizationCodeGrant): + def authenticate_user(self, credential): + # ... + + authorization_server.register_grant(AuthorizationCodeGrant) + + :param grant_cls: a grant class. + :param extensions: extensions for the grant class. + """ + if hasattr(grant_cls, 'check_authorization_endpoint'): + self._authorization_grants.append((grant_cls, extensions)) + if hasattr(grant_cls, 'check_token_endpoint'): + self._token_grants.append((grant_cls, extensions)) + + def register_endpoint(self, endpoint): + """Add extra endpoint to authorization server. e.g. + RevocationEndpoint:: + + authorization_server.register_endpoint(RevocationEndpoint) + + :param endpoint_cls: A endpoint class or instance. + """ + if isinstance(endpoint, type): + endpoint = endpoint(self) + else: + endpoint.server = self + + endpoints = self._endpoints.setdefault(endpoint.ENDPOINT_NAME, []) + endpoints.append(endpoint) + + def get_authorization_grant(self, request): + """Find the authorization grant for current request. + + :param request: OAuth2Request instance. + :return: grant instance + """ + for (grant_cls, extensions) in self._authorization_grants: + if grant_cls.check_authorization_endpoint(request): + return _create_grant(grant_cls, extensions, request, self) + raise UnsupportedResponseTypeError(request.response_type) + + def get_consent_grant(self, request=None, end_user=None): + """Validate current HTTP request for authorization page. This page + is designed for resource owner to grant or deny the authorization. + """ + request = self.create_oauth2_request(request) + request.user = end_user + + grant = self.get_authorization_grant(request) + grant.validate_no_multiple_request_parameter(request) + grant.validate_consent_request() + return grant + + def get_token_grant(self, request): + """Find the token grant for current request. + + :param request: OAuth2Request instance. + :return: grant instance + """ + for (grant_cls, extensions) in self._token_grants: + if grant_cls.check_token_endpoint(request): + return _create_grant(grant_cls, extensions, request, self) + raise UnsupportedGrantTypeError(request.grant_type) + + def create_endpoint_response(self, name, request=None): + """Validate endpoint request and create endpoint response. + + :param name: Endpoint name + :param request: HTTP request instance. + :return: Response + """ + if name not in self._endpoints: + raise RuntimeError(f'There is no "{name}" endpoint.') + + endpoints = self._endpoints[name] + for endpoint in endpoints: + request = endpoint.create_endpoint_request(request) + try: + return self.handle_response(*endpoint(request)) + except ContinueIteration: + continue + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def create_authorization_response(self, request=None, grant_user=None): + """Validate authorization request and create authorization response. + + :param request: HTTP request instance. + :param grant_user: if granted, it is resource owner. If denied, + it is None. 
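A hedged sketch of the ``register_grant`` pattern described above, stubbing out the four abstract methods that ``AuthorizationCodeGrant`` documents individually further below; the ``...`` bodies stand in for your own storage calls::

    from authlib.oauth2.rfc6749 import grants

    class MyAuthorizationCodeGrant(grants.AuthorizationCodeGrant):
        def save_authorization_code(self, code, request):
            ...  # persist code, client_id, redirect_uri, scope and user

        def query_authorization_code(self, code, client):
            ...  # return the stored record for (code, client.client_id), or None

        def delete_authorization_code(self, authorization_code):
            ...  # remove the record once the code has been exchanged

        def authenticate_user(self, authorization_code):
            ...  # return the resource owner the code was issued to

    # authorization_server.register_grant(MyAuthorizationCodeGrant)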
+ :returns: Response + """ + if not isinstance(request, OAuth2Request): + request = self.create_oauth2_request(request) + + try: + grant = self.get_authorization_grant(request) + except UnsupportedResponseTypeError as error: + return self.handle_error_response(request, error) + + try: + redirect_uri = grant.validate_authorization_request() + args = grant.create_authorization_response(redirect_uri, grant_user) + return self.handle_response(*args) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def create_token_response(self, request=None): + """Validate token request and create token response. + + :param request: HTTP request instance + """ + request = self.create_oauth2_request(request) + try: + grant = self.get_token_grant(request) + except UnsupportedGrantTypeError as error: + return self.handle_error_response(request, error) + + try: + grant.validate_token_request() + args = grant.create_token_response() + return self.handle_response(*args) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def handle_error_response(self, request, error): + return self.handle_response(*error(self.get_error_uri(request, error))) + + +def _create_grant(grant_cls, extensions, request, server): + grant = grant_cls(request, server) + if extensions: + for ext in extensions: + ext(grant) + return grant diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/errors.py new file mode 100644 index 0000000..63ffb47 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/errors.py @@ -0,0 +1,233 @@ +""" + authlib.oauth2.rfc6749.errors + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation for OAuth 2 Error Response. A basic error has + parameters: + + error + REQUIRED. A single ASCII [USASCII] error code. + + error_description + OPTIONAL. Human-readable ASCII [USASCII] text providing + additional information, used to assist the client developer in + understanding the error that occurred. + + error_uri + OPTIONAL. A URI identifying a human-readable web page with + information about the error, used to provide the client + developer with additional information about the error. + Values for the "error_uri" parameter MUST conform to the + URI-reference syntax and thus MUST NOT include characters + outside the set %x21 / %x23-5B / %x5D-7E. + + state + REQUIRED if a "state" parameter was present in the client + authorization request. The exact value received from the + client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + + :copyright: (c) 2017 by Hsiaoming Yang. +""" +from authlib.oauth2.base import OAuth2Error +from authlib.common.security import is_secure_transport + +__all__ = [ + 'OAuth2Error', + 'InsecureTransportError', 'InvalidRequestError', + 'InvalidClientError', 'UnauthorizedClientError', 'InvalidGrantError', + 'UnsupportedResponseTypeError', 'UnsupportedGrantTypeError', + 'InvalidScopeError', 'AccessDeniedError', + 'MissingAuthorizationError', 'UnsupportedTokenTypeError', + 'MissingCodeException', 'MissingTokenException', + 'MissingTokenTypeException', 'MismatchingStateException', +] + + +class InsecureTransportError(OAuth2Error): + error = 'insecure_transport' + description = 'OAuth 2 MUST utilize https.' 
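As a rough illustration of how a framework integration might drive ``get_consent_grant``, ``create_authorization_response`` and ``create_token_response``, assuming a hypothetical ``server`` instance plus ``current_user`` and page-rendering helpers that are not part of Authlib::

    def authorize(request):
        # GET: show the consent page; POST: act on the resource owner's decision
        if request.method == 'GET':
            grant = server.get_consent_grant(request, end_user=current_user())
            return render_consent_page(grant)   # hypothetical helper
        grant_user = current_user() if request.form.get('confirm') else None
        return server.create_authorization_response(request, grant_user=grant_user)

    def issue_token(request):
        return server.create_token_response(request)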
+ + @classmethod + def check(cls, uri): + """Check and raise InsecureTransportError with the given URI.""" + if not is_secure_transport(uri): + raise cls() + + +class InvalidRequestError(OAuth2Error): + """The request is missing a required parameter, includes an + unsupported parameter value (other than grant type), + repeats a parameter, includes multiple credentials, + utilizes more than one mechanism for authenticating the + client, or is otherwise malformed. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_request' + + +class InvalidClientError(OAuth2Error): + """Client authentication failed (e.g., unknown client, no + client authentication included, or unsupported + authentication method). The authorization server MAY + return an HTTP 401 (Unauthorized) status code to indicate + which HTTP authentication schemes are supported. If the + client attempted to authenticate via the "Authorization" + request header field, the authorization server MUST + respond with an HTTP 401 (Unauthorized) status code and + include the "WWW-Authenticate" response header field + matching the authentication scheme used by the client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_client' + status_code = 400 + + def get_headers(self): + headers = super().get_headers() + if self.status_code == 401: + error_description = self.get_error_description() + # safe escape + error_description = error_description.replace('"', '|') + extras = [ + f'error="{self.error}"', + f'error_description="{error_description}"' + ] + headers.append( + ('WWW-Authenticate', 'Basic ' + ', '.join(extras)) + ) + return headers + + +class InvalidGrantError(OAuth2Error): + """The provided authorization grant (e.g., authorization + code, resource owner credentials) or refresh token is + invalid, expired, revoked, does not match the redirection + URI used in the authorization request, or was issued to + another client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_grant' + + +class UnauthorizedClientError(OAuth2Error): + """ The authenticated client is not authorized to use this + authorization grant type. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'unauthorized_client' + + +class UnsupportedResponseTypeError(OAuth2Error): + """The authorization server does not support obtaining + an access token using this method.""" + error = 'unsupported_response_type' + + def __init__(self, response_type): + super().__init__() + self.response_type = response_type + + def get_error_description(self): + return f'response_type={self.response_type} is not supported' + + +class UnsupportedGrantTypeError(OAuth2Error): + """The authorization grant type is not supported by the + authorization server. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'unsupported_grant_type' + + def __init__(self, grant_type): + super().__init__() + self.grant_type = grant_type + + def get_error_description(self): + return f'grant_type={self.grant_type} is not supported' + + +class InvalidScopeError(OAuth2Error): + """The requested scope is invalid, unknown, malformed, or + exceeds the scope granted by the resource owner. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_scope' + description = 'The requested scope is invalid, unknown, or malformed.' + + +class AccessDeniedError(OAuth2Error): + """The resource owner or authorization server denied the request. + + Used in authorization endpoint for "code" and "implicit". 
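A quick usage sketch of the ``InsecureTransportError.check`` guard defined above; it returns quietly for HTTPS URIs and raises for plain HTTP::

    from authlib.oauth2.rfc6749.errors import InsecureTransportError

    InsecureTransportError.check('https://server.example.com/token')   # passes
    try:
        InsecureTransportError.check('http://server.example.com/token')
    except InsecureTransportError as exc:
        print(exc.error)   # 'insecure_transport'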
Defined in + `Section 4.1.2.1`_. + + .. _`Section 4.1.2.1`: https://tools.ietf.org/html/rfc6749#section-4.1.2.1 + """ + error = 'access_denied' + description = 'The resource owner or authorization server denied the request' + + +# -- below are extended errors -- # + + +class ForbiddenError(OAuth2Error): + status_code = 401 + + def __init__(self, auth_type=None, realm=None): + super().__init__() + self.auth_type = auth_type + self.realm = realm + + def get_headers(self): + headers = super().get_headers() + if not self.auth_type: + return headers + + extras = [] + if self.realm: + extras.append(f'realm="{self.realm}"') + extras.append(f'error="{self.error}"') + error_description = self.description + extras.append(f'error_description="{error_description}"') + headers.append( + ('WWW-Authenticate', f'{self.auth_type} ' + ', '.join(extras)) + ) + return headers + + +class MissingAuthorizationError(ForbiddenError): + error = 'missing_authorization' + description = 'Missing "Authorization" in headers.' + + +class UnsupportedTokenTypeError(ForbiddenError): + error = 'unsupported_token_type' + + +# -- exceptions for clients -- # + + +class MissingCodeException(OAuth2Error): + error = 'missing_code' + description = 'Missing "code" in response.' + + +class MissingTokenException(OAuth2Error): + error = 'missing_token' + description = 'Missing "access_token" in response.' + + +class MissingTokenTypeException(OAuth2Error): + error = 'missing_token_type' + description = 'Missing "token_type" in response.' + + +class MismatchingStateException(OAuth2Error): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__init__.py new file mode 100644 index 0000000..b179756 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__init__.py @@ -0,0 +1,37 @@ +""" + authlib.oauth2.rfc6749.grants + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation for `Section 4`_ of "Obtaining Authorization". + + To request an access token, the client obtains authorization from the + resource owner. The authorization is expressed in the form of an + authorization grant, which the client uses to request the access + token. OAuth defines four grant types: + + 1. authorization code + 2. implicit + 3. resource owner password credentials + 4. client credentials. + + It also provides an extension mechanism for defining additional grant + types. Authlib defines refresh_token as a grant type too. + + .. 
_`Section 4`: https://tools.ietf.org/html/rfc6749#section-4 +""" + +# flake8: noqa + +from .base import BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin +from .authorization_code import AuthorizationCodeGrant +from .implicit import ImplicitGrant +from .resource_owner_password_credentials import ResourceOwnerPasswordCredentialsGrant +from .client_credentials import ClientCredentialsGrant +from .refresh_token import RefreshTokenGrant + +__all__ = [ + 'BaseGrant', 'AuthorizationEndpointMixin', 'TokenEndpointMixin', + 'AuthorizationCodeGrant', 'ImplicitGrant', + 'ResourceOwnerPasswordCredentialsGrant', + 'ClientCredentialsGrant', 'RefreshTokenGrant', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6eab71f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/authorization_code.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/authorization_code.cpython-312.pyc new file mode 100644 index 0000000..d095cd6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/authorization_code.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..100b700 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/client_credentials.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/client_credentials.cpython-312.pyc new file mode 100644 index 0000000..d8fd29f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/client_credentials.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/implicit.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/implicit.cpython-312.pyc new file mode 100644 index 0000000..ad61324 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/implicit.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/refresh_token.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/refresh_token.cpython-312.pyc new file mode 100644 index 0000000..e859418 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/refresh_token.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/resource_owner_password_credentials.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/resource_owner_password_credentials.cpython-312.pyc new file mode 100644 index 0000000..4a45b4c Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/__pycache__/resource_owner_password_credentials.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py new file mode 100644 index 0000000..76a51de --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py @@ -0,0 +1,378 @@ +import logging +from authlib.common.urls import add_params_to_uri +from authlib.common.security import generate_token +from .base import BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin +from ..errors import ( + OAuth2Error, + UnauthorizedClientError, + InvalidClientError, + InvalidGrantError, + InvalidRequestError, + AccessDeniedError, +) + +log = logging.getLogger(__name__) + + +class AuthorizationCodeGrant(BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin): + """The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI ---->| | + | User- | | Authorization | + | Agent -+----(B)-- User authenticates --->| Server | + | | | | + | -+----(C)-- Authorization Code ---<| | + +-|----|---+ +---------------+ + | | ^ v + (A) (C) | | + | | | | + ^ v | | + +---------+ | | + | |>---(D)-- Authorization Code ---------' | + | Client | & Redirection URI | + | | | + | |<---(E)----- Access Token -------------------' + +---------+ (w/ Optional Refresh Token) + """ + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic', 'client_secret_post'] + + #: Generated "code" length + AUTHORIZATION_CODE_LENGTH = 48 + + RESPONSE_TYPES = {'code'} + GRANT_TYPE = 'authorization_code' + + def validate_authorization_request(self): + """The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format. + Per `Section 4.1.1`_. + + response_type + REQUIRED. Value MUST be set to "code". + + client_id + REQUIRED. The client identifier as described in Section 2.2. + + redirect_uri + OPTIONAL. As described in Section 3.1.2. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in Section 10.12. + + The client directs the resource owner to the constructed URI using an + HTTP redirection response, or by other means available to it via the + user-agent. + + For example, the client directs the user-agent to make the following + HTTP request using TLS (with extra line breaks for display purposes + only): + + .. 
code-block:: http + + GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1 + Host: server.example.com + + The authorization server validates the request to ensure that all + required parameters are present and valid. If the request is valid, + the authorization server authenticates the resource owner and obtains + an authorization decision (by asking the resource owner or by + establishing approval via other means). + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + """ + return validate_code_authorization_request(self) + + def create_authorization_response(self, redirect_uri: str, grant_user): + """If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the "application/x-www-form-urlencoded" format. + Per `Section 4.1.2`_. + + code + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response. + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + .. _`Section 4.1.2`: https://tools.ietf.org/html/rfc6749#section-4.1.2 + + :param redirect_uri: Redirect to the given URI for the authorization + :param grant_user: if resource owner granted the request, pass this + resource owner, otherwise pass None. + :returns: (status_code, body, headers) + """ + if not grant_user: + raise AccessDeniedError(state=self.request.state, redirect_uri=redirect_uri) + + self.request.user = grant_user + + code = self.generate_authorization_code() + self.save_authorization_code(code, self.request) + + params = [('code', code)] + if self.request.state: + params.append(('state', self.request.state)) + uri = add_params_to_uri(redirect_uri, params) + headers = [('Location', uri)] + return 302, '', headers + + def validate_token_request(self): + """The client makes a request to the token endpoint by sending the + following parameters using the "application/x-www-form-urlencoded" + format per `Section 4.1.3`_: + + grant_type + REQUIRED. Value MUST be set to "authorization_code". + + code + REQUIRED. The authorization code received from the + authorization server. + + redirect_uri + REQUIRED, if the "redirect_uri" parameter was included in the + authorization request as described in Section 4.1.1, and their + values MUST be identical. + + client_id + REQUIRED, if the client is not authenticating with the + authorization server as described in Section 3.2.1. 
+ + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in Section 3.2.1. + + For example, the client makes the following HTTP request using TLS: + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3 + """ + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + + # authenticate the client if client authentication is included + client = self.authenticate_token_endpoint_client() + + log.debug('Validate token request of %r', client) + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError( + f'The client is not authorized to use "grant_type={self.GRANT_TYPE}"') + + code = self.request.form.get('code') + if code is None: + raise InvalidRequestError('Missing "code" in request.') + + # ensure that the authorization code was issued to the authenticated + # confidential client, or if the client is public, ensure that the + # code was issued to "client_id" in the request + authorization_code = self.query_authorization_code(code, client) + if not authorization_code: + raise InvalidGrantError('Invalid "code" in request.') + + # validate redirect_uri parameter + log.debug('Validate token redirect_uri of %r', client) + redirect_uri = self.request.redirect_uri + original_redirect_uri = authorization_code.get_redirect_uri() + if original_redirect_uri and redirect_uri != original_redirect_uri: + raise InvalidGrantError('Invalid "redirect_uri" in request.') + + # save for create_token_response + self.request.client = client + self.request.authorization_code = authorization_code + self.execute_hook('after_validate_token_request') + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in Section 5.1. If the request client + authentication failed or is invalid, the authorization server returns + an error response as described in Section 5.2. Per `Section 4.1.4`_. + + An example successful response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + + .. 
_`Section 4.1.4`: https://tools.ietf.org/html/rfc6749#section-4.1.4 + """ + client = self.request.client + authorization_code = self.request.authorization_code + + user = self.authenticate_user(authorization_code) + if not user: + raise InvalidGrantError('There is no "user" for this code.') + self.request.user = user + + scope = authorization_code.get_scope() + token = self.generate_token( + user=user, + scope=scope, + include_refresh_token=client.check_grant_type('refresh_token'), + ) + log.debug('Issue token %r to %r', token, client) + + self.save_token(token) + self.execute_hook('process_token', token=token) + self.delete_authorization_code(authorization_code) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def generate_authorization_code(self): + """"The method to generate "code" value for authorization code data. + Developers may rewrite this method, or customize the code length with:: + + class MyAuthorizationCodeGrant(AuthorizationCodeGrant): + AUTHORIZATION_CODE_LENGTH = 32 # default is 48 + """ + return generate_token(self.AUTHORIZATION_CODE_LENGTH) + + def save_authorization_code(self, code, request): + """Save authorization_code for later use. Developers MUST implement + it in subclass. Here is an example:: + + def save_authorization_code(self, code, request): + client = request.client + item = AuthorizationCode( + code=code, + client_id=client.client_id, + redirect_uri=request.redirect_uri, + scope=request.scope, + user_id=request.user.id, + ) + item.save() + """ + raise NotImplementedError() + + def query_authorization_code(self, code, client): # pragma: no cover + """Get authorization_code from previously savings. Developers MUST + implement it in subclass:: + + def query_authorization_code(self, code, client): + return Authorization.get(code=code, client_id=client.client_id) + + :param code: a string represent the code. + :param client: client related to this code. + :return: authorization_code object + """ + raise NotImplementedError() + + def delete_authorization_code(self, authorization_code): + """Delete authorization code from database or cache. Developers MUST + implement it in subclass, e.g.:: + + def delete_authorization_code(self, authorization_code): + authorization_code.delete() + + :param authorization_code: the instance of authorization_code + """ + raise NotImplementedError() + + def authenticate_user(self, authorization_code): + """Authenticate the user related to this authorization_code. 
Developers + MUST implement this method in subclass, e.g.:: + + def authenticate_user(self, authorization_code): + return User.get(authorization_code.user_id) + + :param authorization_code: AuthorizationCode object + :return: user + """ + raise NotImplementedError() + + +def validate_code_authorization_request(grant): + request = grant.request + client_id = request.client_id + log.debug('Validate authorization request of %r', client_id) + + if client_id is None: + raise InvalidClientError(state=request.state) + + client = grant.server.query_client(client_id) + if not client: + raise InvalidClientError(state=request.state) + + redirect_uri = grant.validate_authorization_redirect_uri(request, client) + response_type = request.response_type + if not client.check_response_type(response_type): + raise UnauthorizedClientError( + f'The client is not authorized to use "response_type={response_type}"', + state=grant.request.state, + redirect_uri=redirect_uri, + ) + + try: + grant.request.client = client + grant.validate_requested_scope() + grant.execute_hook('after_validate_authorization_request') + except OAuth2Error as error: + error.redirect_uri = redirect_uri + raise error + return redirect_uri diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/base.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/base.py new file mode 100644 index 0000000..9aa3c76 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/base.py @@ -0,0 +1,162 @@ +from authlib.consts import default_json_headers +from authlib.common.urls import urlparse +from ..requests import OAuth2Request +from ..errors import InvalidRequestError + + +class BaseGrant: + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic'] + + #: Designed for which "grant_type" + GRANT_TYPE = None + + # NOTE: there is no charset for application/json, since + # application/json should always in UTF-8. + # The example on RFC is incorrect. + # https://tools.ietf.org/html/rfc4627 + TOKEN_RESPONSE_HEADER = default_json_headers + + def __init__(self, request: OAuth2Request, server): + self.prompt = None + self.redirect_uri = None + self.request = request + self.server = server + self._hooks = { + 'after_validate_authorization_request': set(), + 'after_validate_consent_request': set(), + 'after_validate_token_request': set(), + 'process_token': set(), + } + + @property + def client(self): + return self.request.client + + def generate_token(self, user=None, scope=None, grant_type=None, + expires_in=None, include_refresh_token=True): + if grant_type is None: + grant_type = self.GRANT_TYPE + return self.server.generate_token( + client=self.request.client, + grant_type=grant_type, + user=user, + scope=scope, + expires_in=expires_in, + include_refresh_token=include_refresh_token, + ) + + def authenticate_token_endpoint_client(self): + """Authenticate client with the given methods for token endpoint. + + For example, the client makes the following HTTP request using TLS: + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + Default available methods are: "none", "client_secret_basic" and + "client_secret_post". 
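Because ``TOKEN_ENDPOINT_AUTH_METHODS`` is a plain class attribute passed to ``authenticate_client``, a grant subclass can narrow which of those methods it accepts; a one-line sketch (``MyCodeGrant`` is hypothetical)::

    from authlib.oauth2.rfc6749 import grants

    class MyCodeGrant(grants.AuthorizationCodeGrant):
        # accept only HTTP Basic credentials at the token endpoint
        TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic']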
+ + :return: client + """ + client = self.server.authenticate_client( + self.request, self.TOKEN_ENDPOINT_AUTH_METHODS) + self.server.send_signal( + 'after_authenticate_client', + client=client, grant=self) + return client + + def save_token(self, token): + """A method to save token into database.""" + return self.server.save_token(token, self.request) + + def validate_requested_scope(self): + """Validate if requested scope is supported by Authorization Server.""" + scope = self.request.scope + state = self.request.state + return self.server.validate_requested_scope(scope, state) + + def register_hook(self, hook_type, hook): + if hook_type not in self._hooks: + raise ValueError('Hook type %s is not in %s.', + hook_type, self._hooks) + self._hooks[hook_type].add(hook) + + def execute_hook(self, hook_type, *args, **kwargs): + for hook in self._hooks[hook_type]: + hook(self, *args, **kwargs) + + +class TokenEndpointMixin: + #: Allowed HTTP methods of this token endpoint + TOKEN_ENDPOINT_HTTP_METHODS = ['POST'] + + #: Designed for which "grant_type" + GRANT_TYPE = None + + @classmethod + def check_token_endpoint(cls, request: OAuth2Request): + return request.grant_type == cls.GRANT_TYPE and \ + request.method in cls.TOKEN_ENDPOINT_HTTP_METHODS + + def validate_token_request(self): + raise NotImplementedError() + + def create_token_response(self): + raise NotImplementedError() + + +class AuthorizationEndpointMixin: + RESPONSE_TYPES = set() + ERROR_RESPONSE_FRAGMENT = False + + @classmethod + def check_authorization_endpoint(cls, request: OAuth2Request): + return request.response_type in cls.RESPONSE_TYPES + + @staticmethod + def validate_authorization_redirect_uri(request: OAuth2Request, client): + if request.redirect_uri: + if not client.check_redirect_uri(request.redirect_uri): + raise InvalidRequestError( + f'Redirect URI {request.redirect_uri} is not supported by client.', + state=request.state) + return request.redirect_uri + else: + redirect_uri = client.get_default_redirect_uri() + if not redirect_uri: + raise InvalidRequestError( + 'Missing "redirect_uri" in request.', + state=request.state) + return redirect_uri + + @staticmethod + def validate_no_multiple_request_parameter(request: OAuth2Request): + """For the Authorization Endpoint, request and response parameters MUST NOT be included + more than once. Per `Section 3.1`_. + + .. 
_`Section 3.1`: https://tools.ietf.org/html/rfc6749#section-3.1 + """ + datalist = request.datalist + parameters = ["response_type", "client_id", "redirect_uri", "scope", "state"] + for param in parameters: + if len(datalist.get(param, [])) > 1: + raise InvalidRequestError(f'Multiple "{param}" in request.', state=request.state) + + def validate_consent_request(self): + redirect_uri = self.validate_authorization_request() + self.execute_hook('after_validate_consent_request', redirect_uri) + self.redirect_uri = redirect_uri + + def validate_authorization_request(self): + raise NotImplementedError() + + def create_authorization_response(self, redirect_uri: str, grant_user): + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py new file mode 100644 index 0000000..57249cb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py @@ -0,0 +1,102 @@ +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..errors import UnauthorizedClientError + +log = logging.getLogger(__name__) + + +class ClientCredentialsGrant(BaseGrant, TokenEndpointMixin): + """The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner that have been previously + arranged with the authorization server. + + The client credentials grant type MUST only be used by confidential + clients:: + + +---------+ +---------------+ + | | | | + | |>--(A)- Client Authentication --->| Authorization | + | Client | | Server | + | |<--(B)---- Access Token ---------<| | + | | | | + +---------+ +---------------+ + + https://tools.ietf.org/html/rfc6749#section-4.4 + """ + GRANT_TYPE = 'client_credentials' + + def validate_token_request(self): + """The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "client_credentials". + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + The client MUST authenticate with the authorization server as + described in Section 3.2.1. + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=client_credentials + + The authorization server MUST authenticate the client. + """ + + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + self.request.client = client + self.validate_requested_scope() + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token as described in + Section 5.1. A refresh token SHOULD NOT be included. 
If the request + failed client authentication or is invalid, the authorization server + returns an error response as described in Section 5.2. + + An example successful response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + """ + token = self.generate_token(scope=self.request.scope, include_refresh_token=False) + log.debug('Issue token %r to %r', token, self.client) + self.save_token(token) + self.execute_hook('process_token', self, token=token) + return 200, token, self.TOKEN_RESPONSE_HEADER diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/implicit.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/implicit.py new file mode 100644 index 0000000..75b12be --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/implicit.py @@ -0,0 +1,229 @@ +import logging +from authlib.common.urls import add_params_to_uri +from .base import BaseGrant, AuthorizationEndpointMixin +from ..errors import ( + OAuth2Error, + UnauthorizedClientError, + AccessDeniedError, +) + +log = logging.getLogger(__name__) + + +class ImplicitGrant(BaseGrant, AuthorizationEndpointMixin): + """The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + + Unlike the authorization code grant type, in which the client makes + separate requests for authorization and for an access token, the + client receives the access token as the result of the authorization + request. + + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. 
Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI --->| | + | User- | | Authorization | + | Agent -|----(B)-- User authenticates -->| Server | + | | | | + | |<---(C)--- Redirection URI ----<| | + | | with Access Token +---------------+ + | | in Fragment + | | +---------------+ + | |----(D)--- Redirection URI ---->| Web-Hosted | + | | without Fragment | Client | + | | | Resource | + | (F) |<---(E)------- Script ---------<| | + | | +---------------+ + +-|--------+ + | | + (A) (G) Access Token + | | + ^ v + +---------+ + | | + | Client | + | | + +---------+ + """ + #: authorization_code grant type has authorization endpoint + AUTHORIZATION_ENDPOINT = True + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['none'] + + RESPONSE_TYPES = {'token'} + GRANT_TYPE = 'implicit' + ERROR_RESPONSE_FRAGMENT = True + + def validate_authorization_request(self): + """The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format. + Per `Section 4.2.1`_. + + response_type + REQUIRED. Value MUST be set to "token". + + client_id + REQUIRED. The client identifier as described in Section 2.2. + + redirect_uri + OPTIONAL. As described in Section 3.1.2. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in Section 10.12. + + The client directs the resource owner to the constructed URI using an + HTTP redirection response, or by other means available to it via the + user-agent. + + For example, the client directs the user-agent to make the following + HTTP request using TLS: + + .. code-block:: http + + GET /authorize?response_type=token&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1 + Host: server.example.com + + .. 
_`Section 4.2.1`: https://tools.ietf.org/html/rfc6749#section-4.2.1 + """ + # ignore validate for response_type, since it is validated by + # check_authorization_endpoint + + # The implicit grant type is optimized for public clients + client = self.authenticate_token_endpoint_client() + log.debug('Validate authorization request of %r', client) + + redirect_uri = self.validate_authorization_redirect_uri( + self.request, client) + + response_type = self.request.response_type + if not client.check_response_type(response_type): + raise UnauthorizedClientError( + 'The client is not authorized to use ' + '"response_type={}"'.format(response_type), + state=self.request.state, + redirect_uri=redirect_uri, + redirect_fragment=True, + ) + + try: + self.request.client = client + self.validate_requested_scope() + self.execute_hook('after_validate_authorization_request') + except OAuth2Error as error: + error.redirect_uri = redirect_uri + error.redirect_fragment = True + raise error + return redirect_uri + + def create_authorization_response(self, redirect_uri, grant_user): + """If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format. + Per `Section 4.2.2`_. + + access_token + REQUIRED. The access token issued by the authorization server. + + token_type + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + scope + OPTIONAL, if identical to the scope requested by the client; + otherwise, REQUIRED. The scope of the access token as + described by Section 3.3. + + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + The authorization server MUST NOT issue a refresh token. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + + Developers should note that some user-agents do not support the + inclusion of a fragment component in the HTTP "Location" response + header field. Such clients will require using other methods for + redirecting the client than a 3xx redirection response -- for + example, returning an HTML page that includes a 'continue' button + with an action linked to the redirection URI. + + .. _`Section 4.2.2`: https://tools.ietf.org/html/rfc6749#section-4.2.2 + + :param redirect_uri: Redirect to the given URI for the authorization + :param grant_user: if resource owner granted the request, pass this + resource owner, otherwise pass None. 
+ :returns: (status_code, body, headers) + """ + state = self.request.state + if grant_user: + self.request.user = grant_user + token = self.generate_token( + user=grant_user, + scope=self.request.scope, + include_refresh_token=False, + ) + log.debug('Grant token %r to %r', token, self.request.client) + + self.save_token(token) + self.execute_hook('process_token', token=token) + params = [(k, token[k]) for k in token] + if state: + params.append(('state', state)) + + uri = add_params_to_uri(redirect_uri, params, fragment=True) + headers = [('Location', uri)] + return 302, '', headers + else: + raise AccessDeniedError( + state=state, + redirect_uri=redirect_uri, + redirect_fragment=True + ) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py new file mode 100644 index 0000000..4df5b70 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py @@ -0,0 +1,179 @@ +""" + authlib.oauth2.rfc6749.grants.refresh_token + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + A special grant endpoint for refresh_token grant_type. Refreshing an + Access Token per `Section 6`_. + + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 +""" + +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..util import scope_to_list +from ..errors import ( + InvalidRequestError, + InvalidScopeError, + InvalidGrantError, + UnauthorizedClientError, +) +log = logging.getLogger(__name__) + + +class RefreshTokenGrant(BaseGrant, TokenEndpointMixin): + """A special grant endpoint for refresh_token grant_type. Refreshing an + Access Token per `Section 6`_. + + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 + """ + GRANT_TYPE = 'refresh_token' + + #: The authorization server MAY issue a new refresh token + INCLUDE_NEW_REFRESH_TOKEN = False + + def _validate_request_client(self): + # require client authentication for confidential clients or for any + # client that was issued client credentials (or with other + # authentication requirements) + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + return client + + def _validate_request_token(self, client): + refresh_token = self.request.form.get('refresh_token') + if refresh_token is None: + raise InvalidRequestError('Missing "refresh_token" in request.') + + token = self.authenticate_refresh_token(refresh_token) + if not token or not token.check_client(client): + raise InvalidGrantError() + return token + + def _validate_token_scope(self, token): + scope = self.request.scope + if not scope: + return + + original_scope = token.get_scope() + if not original_scope: + raise InvalidScopeError() + + original_scope = set(scope_to_list(original_scope)) + if not original_scope.issuperset(set(scope_to_list(scope))): + raise InvalidScopeError() + + def validate_token_request(self): + """If the authorization server issued a refresh token to the client, the + client makes a refresh request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body, per Section 6: + + grant_type + REQUIRED. Value MUST be set to "refresh_token". + + refresh_token + REQUIRED. The refresh token issued to the client. 
+ + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. The requested scope MUST NOT include any scope + not originally granted by the resource owner, and if omitted is + treated as equal to the scope originally granted by the + resource owner. + + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=refresh_token&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA + """ + client = self._validate_request_client() + self.request.client = client + refresh_token = self._validate_request_token(client) + self._validate_token_scope(refresh_token) + self.request.refresh_token = refresh_token + + def create_token_response(self): + """If valid and authorized, the authorization server issues an access + token as described in Section 5.1. If the request failed + verification or is invalid, the authorization server returns an error + response as described in Section 5.2. + """ + refresh_token = self.request.refresh_token + user = self.authenticate_user(refresh_token) + if not user: + raise InvalidRequestError('There is no "user" for this token.') + + client = self.request.client + token = self.issue_token(user, refresh_token) + log.debug('Issue token %r to %r', token, client) + + self.request.user = user + self.save_token(token) + self.execute_hook('process_token', token=token) + self.revoke_old_credential(refresh_token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def issue_token(self, user, refresh_token): + scope = self.request.scope + if not scope: + scope = refresh_token.get_scope() + + token = self.generate_token( + user=user, + scope=scope, + include_refresh_token=self.INCLUDE_NEW_REFRESH_TOKEN, + ) + return token + + def authenticate_refresh_token(self, refresh_token): + """Get token information with refresh_token string. Developers MUST + implement this method in subclass:: + + def authenticate_refresh_token(self, refresh_token): + token = Token.get(refresh_token=refresh_token) + if token and not token.refresh_token_revoked: + return token + + :param refresh_token: The refresh token issued to the client + :return: token + """ + raise NotImplementedError() + + def authenticate_user(self, refresh_token): + """Authenticate the user related to this credential. Developers MUST + implement this method in subclass:: + + def authenticate_user(self, credential): + return User.get(credential.user_id) + + :param refresh_token: Token object + :return: user + """ + raise NotImplementedError() + + def revoke_old_credential(self, refresh_token): + """The authorization server MAY revoke the old refresh token after + issuing a new refresh token to the client. 
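Pulling the three abstract hooks together, a hedged end-to-end sketch of a refresh-token grant with rotation enabled; ``lookup_token`` and the ``revoked``/``user`` attributes are assumptions about your token model, not Authlib API::

    from authlib.oauth2.rfc6749.grants import RefreshTokenGrant

    class MyRefreshTokenGrant(RefreshTokenGrant):
        INCLUDE_NEW_REFRESH_TOKEN = True  # issue a fresh refresh_token on use

        def authenticate_refresh_token(self, refresh_token):
            token = lookup_token(refresh_token=refresh_token)  # hypothetical query
            if token is not None and not token.revoked:
                return token

        def authenticate_user(self, refresh_token):
            return refresh_token.user  # assumes the token record links its owner

        def revoke_old_credential(self, refresh_token):
            refresh_token.revoked = True  # persist as appropriate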
Developers MUST implement + this method in subclass:: + + def revoke_old_credential(self, refresh_token): + credential.revoked = True + credential.save() + + :param refresh_token: Token object + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py new file mode 100644 index 0000000..41cabb6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py @@ -0,0 +1,154 @@ +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..errors import ( + UnauthorizedClientError, + InvalidRequestError, +) + +log = logging.getLogger(__name__) + + +class ResourceOwnerPasswordCredentialsGrant(BaseGrant, TokenEndpointMixin): + """The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + + application. The authorization server should take special care when + enabling this grant type and only allow it when other flows are not + viable. + + This grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + v + | Resource Owner + (A) Password Credentials + | + v + +---------+ +---------------+ + | |>--(B)---- Resource Owner ------->| | + | | Password Credentials | Authorization | + | Client | | Server | + | |<--(C)---- Access Token ---------<| | + | | (w/ Optional Refresh Token) | | + +---------+ +---------------+ + """ + GRANT_TYPE = 'password' + + def validate_token_request(self): + """The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "password". + + username + REQUIRED. The resource owner username. + + password + REQUIRED. The resource owner password. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in Section 3.2.1. + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. 
code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=password&username=johndoe&password=A3ddj3w + """ + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + params = self.request.form + if 'username' not in params: + raise InvalidRequestError('Missing "username" in request.') + if 'password' not in params: + raise InvalidRequestError('Missing "password" in request.') + + log.debug('Authenticate user of %r', params['username']) + user = self.authenticate_user( + params['username'], + params['password'] + ) + if not user: + raise InvalidRequestError( + 'Invalid "username" or "password" in request.', + ) + self.request.client = client + self.request.user = user + self.validate_requested_scope() + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in Section 5.1. If the request failed client + authentication or is invalid, the authorization server returns an + error response as described in Section 5.2. + + An example successful response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + """ + user = self.request.user + scope = self.request.scope + token = self.generate_token(user=user, scope=scope) + log.debug('Issue token %r to %r', token, self.client) + self.save_token(token) + self.execute_hook('process_token', token=token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def authenticate_user(self, username, password): + """validate the resource owner password credentials using its + existing password validation algorithm:: + + def authenticate_user(self, username, password): + user = get_user_by_username(username) + if user.check_password(password): + return user + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/models.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/models.py new file mode 100644 index 0000000..fe4922b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/models.py @@ -0,0 +1,228 @@ +""" + authlib.oauth2.rfc6749.models + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module defines how to construct Client, AuthorizationCode and Token. +""" +from authlib.deprecate import deprecate + + +class ClientMixin: + """Implementation of OAuth 2 Client described in `Section 2`_ with + some methods to help validation. A client has at least these information: + + * client_id: A string represents client identifier. + * client_secret: A string represents client password. + * token_endpoint_auth_method: A way to authenticate client at token + endpoint. + + .. _`Section 2`: https://tools.ietf.org/html/rfc6749#section-2 + """ + + def get_client_id(self): + """A method to return client_id of the client. 
For instance, the value + in database is saved in a column called ``client_id``:: + + def get_client_id(self): + return self.client_id + + :return: string + """ + raise NotImplementedError() + + def get_default_redirect_uri(self): + """A method to get client default redirect_uri. For instance, the + database table for client has a column called ``default_redirect_uri``:: + + def get_default_redirect_uri(self): + return self.default_redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_allowed_scope(self, scope): + """A method to return a list of requested scopes which are supported by + this client. For instance, there is a ``scope`` column:: + + def get_allowed_scope(self, scope): + if not scope: + return '' + allowed = set(scope_to_list(self.scope)) + return list_to_scope([s for s in scope.split() if s in allowed]) + + :param scope: the requested scope. + :return: string of scope + """ + raise NotImplementedError() + + def check_redirect_uri(self, redirect_uri): + """Validate redirect_uri parameter in Authorization Endpoints. For + instance, in the client table, there is an ``allowed_redirect_uris`` + column:: + + def check_redirect_uri(self, redirect_uri): + return redirect_uri in self.allowed_redirect_uris + + :param redirect_uri: A URL string for redirecting. + :return: bool + """ + raise NotImplementedError() + + def check_client_secret(self, client_secret): + """Check client_secret matching with the client. For instance, in + the client table, the column is called ``client_secret``:: + + import secrets + + def check_client_secret(self, client_secret): + return secrets.compare_digest(self.client_secret, client_secret) + + :param client_secret: A string of client secret + :return: bool + """ + raise NotImplementedError() + + def check_endpoint_auth_method(self, method, endpoint): + """Check if client support the given method for the given endpoint. + There is a ``token_endpoint_auth_method`` defined via `RFC7591`_. + Developers MAY re-implement this method with:: + + def check_endpoint_auth_method(self, method, endpoint): + if endpoint == 'token': + # if client table has ``token_endpoint_auth_method`` + return self.token_endpoint_auth_method == method + return True + + Method values defined by this specification are: + + * "none": The client is a public client as defined in OAuth 2.0, + and does not have a client secret. + + * "client_secret_post": The client uses the HTTP POST parameters + as defined in OAuth 2.0 + + * "client_secret_basic": The client uses HTTP Basic as defined in + OAuth 2.0 + + .. _`RFC7591`: https://tools.ietf.org/html/rfc7591 + """ + raise NotImplementedError() + + def check_token_endpoint_auth_method(self, method): + deprecate('Please implement ``check_endpoint_auth_method`` instead.') + return self.check_endpoint_auth_method(method, 'token') + + def check_response_type(self, response_type): + """Validate if the client can handle the given response_type. There + are two response types defined by RFC6749: code and token. For + instance, there is a ``allowed_response_types`` column in your client:: + + def check_response_type(self, response_type): + return response_type in self.response_types + + :param response_type: the requested response_type string. + :return: bool + """ + raise NotImplementedError() + + def check_grant_type(self, grant_type): + """Validate if the client can handle the given grant_type. 
There are + four grant types defined by RFC6749: + + * authorization_code + * implicit + * client_credentials + * password + + For instance, there is a ``allowed_grant_types`` column in your client:: + + def check_grant_type(self, grant_type): + return grant_type in self.grant_types + + :param grant_type: the requested grant_type string. + :return: bool + """ + raise NotImplementedError() + + +class AuthorizationCodeMixin: + def get_redirect_uri(self): + """A method to get authorization code's ``redirect_uri``. + For instance, the database table for authorization code has a + column called ``redirect_uri``:: + + def get_redirect_uri(self): + return self.redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_scope(self): + """A method to get scope of the authorization code. For instance, + the column is called ``scope``:: + + def get_scope(self): + return self.scope + + :return: scope string + """ + raise NotImplementedError() + + +class TokenMixin: + def check_client(self, client): + """A method to check if this token is issued to the given client. + For instance, ``client_id`` is saved on token table:: + + def check_client(self, client): + return self.client_id == client.client_id + + :return: bool + """ + raise NotImplementedError() + + def get_scope(self): + """A method to get scope of the authorization code. For instance, + the column is called ``scope``:: + + def get_scope(self): + return self.scope + + :return: scope string + """ + raise NotImplementedError() + + def get_expires_in(self): + """A method to get the ``expires_in`` value of the token. e.g. + the column is called ``expires_in``:: + + def get_expires_in(self): + return self.expires_in + + :return: timestamp int + """ + raise NotImplementedError() + + def is_expired(self): + """A method to define if this token is expired. For instance, + there is a column ``expired_at`` in the table:: + + def is_expired(self): + return self.expired_at < now + + :return: boolean + """ + raise NotImplementedError() + + def is_revoked(self): + """A method to define if this token is revoked. For instance, + there is a boolean column ``revoked`` in the table:: + + def is_revoked(self): + return self.revoked + + :return: boolean + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/parameters.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/parameters.py new file mode 100644 index 0000000..8c3a5aa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/parameters.py @@ -0,0 +1,214 @@ +from authlib.common.urls import ( + urlparse, + add_params_to_uri, + add_params_to_qs, +) +from authlib.common.encoding import to_unicode +from .errors import ( + MissingCodeException, + MissingTokenException, + MissingTokenTypeException, + MismatchingStateException, +) +from .util import list_to_scope + + +def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, + scope=None, state=None, **kwargs): + """Prepare the authorization grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the ``application/x-www-form-urlencoded`` format: + + :param uri: The authorize endpoint to fetch "code" or "token". + :param client_id: The client identifier as described in `Section 2.2`_. + :param response_type: To indicate which OAuth 2 grant/flow is required, + "code" and "token". 
+ :param redirect_uri: The client provided URI to redirect back to after + authorization as described in `Section 3.1.2`_. + :param scope: The scope of the access request as described by + `Section 3.3`_. + :param state: An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent + back to the client. The parameter SHOULD be used for + preventing cross-site request forgery as described in + `Section 10.12`_. + :param kwargs: Extra arguments to embed in the grant/authorization URL. + + An example of an authorization code grant authorization URL:: + + /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + params = [ + ('response_type', response_type), + ('client_id', client_id) + ] + + if redirect_uri: + params.append(('redirect_uri', redirect_uri)) + if scope: + params.append(('scope', list_to_scope(scope))) + if state: + params.append(('state', state)) + + for k in kwargs: + if kwargs[k] is not None: + params.append((to_unicode(k), kwargs[k])) + + return add_params_to_uri(uri, params) + + +def prepare_token_request(grant_type, body='', redirect_uri=None, **kwargs): + """Prepare the access token request. Per `Section 4.1.3`_. + + The client makes a request to the token endpoint by adding the + following parameters using the ``application/x-www-form-urlencoded`` + format in the HTTP request entity-body: + + :param grant_type: To indicate grant type being used, i.e. "password", + "authorization_code" or "client_credentials". + :param body: Existing request body to embed parameters in. + :param redirect_uri: If the "redirect_uri" parameter was included in the + authorization request as described in + `Section 4.1.1`_, and their values MUST be identical. + :param kwargs: Extra arguments to embed in the request body. + + An example of an authorization code token request body:: + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + .. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3 + """ + params = [('grant_type', grant_type)] + + if redirect_uri: + params.append(('redirect_uri', redirect_uri)) + + if 'scope' in kwargs: + kwargs['scope'] = list_to_scope(kwargs['scope']) + + if grant_type == 'authorization_code' and 'code' not in kwargs: + raise MissingCodeException() + + for k in kwargs: + if kwargs[k]: + params.append((to_unicode(k), kwargs[k])) + + return add_params_to_qs(body, params) + + +def parse_authorization_code_response(uri, state=None): + """Parse authorization grant response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the ``application/x-www-form-urlencoded`` format: + + **code** + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. 
A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + :param uri: The full redirect URL back to the client. + :param state: The state parameter from the authorization request. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + """ + query = urlparse.urlparse(uri).query + params = dict(urlparse.parse_qsl(query)) + + if 'code' not in params: + raise MissingCodeException() + + params_state = params.get('state') + if state and params_state != state: + raise MismatchingStateException() + + return params + + +def parse_implicit_response(uri, state=None): + """Parse the implicit token response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the ``application/x-www-form-urlencoded`` format: + + **access_token** + REQUIRED. The access token issued by the authorization server. + + **token_type** + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + **expires_in** + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. The scope of the access token as described + by Section 3.3. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + Similar to the authorization code response, but with a full token provided + in the URL fragment: + + .. 
code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + """ + fragment = urlparse.urlparse(uri).fragment + params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True)) + + if 'access_token' not in params: + raise MissingTokenException() + + if 'token_type' not in params: + raise MissingTokenTypeException() + + if state and params.get('state', None) != state: + raise MismatchingStateException() + + return params diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/requests.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/requests.py new file mode 100644 index 0000000..7f6a709 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/requests.py @@ -0,0 +1,103 @@ +from collections import defaultdict +from typing import DefaultDict + +from authlib.common.encoding import json_loads +from authlib.common.urls import urlparse, url_decode +from .errors import InsecureTransportError + + +class OAuth2Request: + def __init__(self, method: str, uri: str, body=None, headers=None): + InsecureTransportError.check(uri) + #: HTTP method + self.method = method + self.uri = uri + self.body = body + #: HTTP headers + self.headers = headers or {} + + self.client = None + self.auth_method = None + self.user = None + self.authorization_code = None + self.refresh_token = None + self.credential = None + + self._parsed_query = None + + @property + def args(self): + if self._parsed_query is None: + self._parsed_query = url_decode(urlparse.urlparse(self.uri).query) + return dict(self._parsed_query) + + @property + def form(self): + return self.body or {} + + @property + def data(self): + data = {} + data.update(self.args) + data.update(self.form) + return data + + @property + def datalist(self) -> DefaultDict[str, list]: + """ Return all the data in query parameters and the body of the request as a dictionary with all the values + in lists. """ + if self._parsed_query is None: + self._parsed_query = url_decode(urlparse.urlparse(self.uri).query) + values = defaultdict(list) + for k, v in self._parsed_query: + values[k].append(v) + for k, v in self.form.items(): + values[k].append(v) + return values + + @property + def client_id(self) -> str: + """The authorization server issues the registered client a client + identifier -- a unique string representing the registration + information provided by the client. The value is extracted from + request. 
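+        For instance (a sketch with illustrative values; ``client_id`` is
+        read from the merged query and form data)::
+
+            req = OAuth2Request(
+                method='POST',
+                uri='https://server.example.com/token',
+                body={'grant_type': 'authorization_code',
+                      'client_id': 's6BhdRkqt3'},
+            )
+            assert req.client_id == 's6BhdRkqt3'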
+ + :return: string + """ + return self.data.get('client_id') + + @property + def response_type(self) -> str: + rt = self.data.get('response_type') + if rt and ' ' in rt: + # sort multiple response types + return ' '.join(sorted(rt.split())) + return rt + + @property + def grant_type(self) -> str: + return self.form.get('grant_type') + + @property + def redirect_uri(self): + return self.data.get('redirect_uri') + + @property + def scope(self) -> str: + return self.data.get('scope') + + @property + def state(self): + return self.data.get('state') + + +class JsonRequest: + def __init__(self, method, uri, body=None, headers=None): + self.method = method + self.uri = uri + self.body = body + self.headers = headers or {} + + @property + def data(self): + return json_loads(self.body) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/resource_protector.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/resource_protector.py new file mode 100644 index 0000000..60a85d8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/resource_protector.py @@ -0,0 +1,140 @@ +""" + authlib.oauth2.rfc6749.resource_protector + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation of Accessing Protected Resources per `Section 7`_. + + .. _`Section 7`: https://tools.ietf.org/html/rfc6749#section-7 +""" +from .util import scope_to_list +from .errors import MissingAuthorizationError, UnsupportedTokenTypeError + + +class TokenValidator: + """Base token validator class. Subclass this validator to register + into ResourceProtector instance. + """ + TOKEN_TYPE = 'bearer' + + def __init__(self, realm=None, **extra_attributes): + self.realm = realm + self.extra_attributes = extra_attributes + + @staticmethod + def scope_insufficient(token_scopes, required_scopes): + if not required_scopes: + return False + + token_scopes = scope_to_list(token_scopes) + if not token_scopes: + return True + + token_scopes = set(token_scopes) + for scope in required_scopes: + resource_scopes = set(scope_to_list(scope)) + if token_scopes.issuperset(resource_scopes): + return False + + return True + + def authenticate_token(self, token_string): + """A method to query token from database with the given token string. + Developers MUST re-implement this method. For instance:: + + def authenticate_token(self, token_string): + return get_token_from_database(token_string) + + :param token_string: A string to represent the access_token. + :return: token + """ + raise NotImplementedError() + + def validate_request(self, request): + """A method to validate if the HTTP request is valid or not. Developers MUST + re-implement this method. For instance, your server requires a + "X-Device-Version" in the header:: + + def validate_request(self, request): + if 'X-Device-Version' not in request.headers: + raise InvalidRequestError() + + Usually, you don't have to detect if the request is valid or not. If you have + to, you MUST re-implement this method. + + :param request: instance of HttpRequest + :raise: InvalidRequestError + """ + + def validate_token(self, token, scopes, request): + """A method to validate if the authorized token is valid, if it has the + permission on the given scopes. Developers MUST re-implement this method. 
+ e.g, check if token is expired, revoked:: + + def validate_token(self, token, scopes, request): + if not token: + raise InvalidTokenError() + if token.is_expired() or token.is_revoked(): + raise InvalidTokenError() + if not match_token_scopes(token, scopes): + raise InsufficientScopeError() + """ + raise NotImplementedError() + + +class ResourceProtector: + def __init__(self): + self._token_validators = {} + self._default_realm = None + self._default_auth_type = None + + def register_token_validator(self, validator: TokenValidator): + """Register a token validator for a given Authorization type. + Authlib has a built-in BearerTokenValidator per rfc6750. + """ + if not self._default_auth_type: + self._default_realm = validator.realm + self._default_auth_type = validator.TOKEN_TYPE + + if validator.TOKEN_TYPE not in self._token_validators: + self._token_validators[validator.TOKEN_TYPE] = validator + + def get_token_validator(self, token_type): + """Get token validator from registry for the given token type.""" + validator = self._token_validators.get(token_type.lower()) + if not validator: + raise UnsupportedTokenTypeError(self._default_auth_type, self._default_realm) + return validator + + def parse_request_authorization(self, request): + """Parse the token and token validator from request Authorization header. + Here is an example of Authorization header:: + + Authorization: Bearer a-token-string + + This method will parse this header, if it can find the validator for + ``Bearer``, it will return the validator and ``a-token-string``. + + :return: validator, token_string + :raise: MissingAuthorizationError + :raise: UnsupportedTokenTypeError + """ + auth = request.headers.get('Authorization') + if not auth: + raise MissingAuthorizationError(self._default_auth_type, self._default_realm) + + # https://tools.ietf.org/html/rfc6749#section-7.1 + token_parts = auth.split(None, 1) + if len(token_parts) != 2: + raise UnsupportedTokenTypeError(self._default_auth_type, self._default_realm) + + token_type, token_string = token_parts + validator = self.get_token_validator(token_type) + return validator, token_string + + def validate_request(self, scopes, request, **kwargs): + """Validate the request and return a token.""" + validator, token_string = self.parse_request_authorization(request) + validator.validate_request(request) + token = validator.authenticate_token(token_string) + validator.validate_token(token, scopes, request, **kwargs) + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/token_endpoint.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/token_endpoint.py new file mode 100644 index 0000000..0ede557 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/token_endpoint.py @@ -0,0 +1,32 @@ +class TokenEndpoint: + #: Endpoint name to be registered + ENDPOINT_NAME = None + #: Supported token types + SUPPORTED_TOKEN_TYPES = ('access_token', 'refresh_token') + #: Allowed client authenticate methods + CLIENT_AUTH_METHODS = ['client_secret_basic'] + + def __init__(self, server): + self.server = server + + def __call__(self, request): + # make it callable for authorization server + # ``create_endpoint_response`` + return self.create_endpoint_response(request) + + def create_endpoint_request(self, request): + return self.server.create_oauth2_request(request) + + def authenticate_endpoint_client(self, request): + """Authentication client for endpoint with ``CLIENT_AUTH_METHODS``. 
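+        A subclass may widen the accepted methods by overriding the class
+        attribute, e.g. (``MyEndpoint`` is a hypothetical subclass)::
+
+            class MyEndpoint(TokenEndpoint):
+                CLIENT_AUTH_METHODS = [
+                    'client_secret_basic',
+                    'client_secret_post',
+                ]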
+ """ + client = self.server.authenticate_client( + request, self.CLIENT_AUTH_METHODS, self.ENDPOINT_NAME) + request.client = client + return client + + def authenticate_token(self, request, client): + raise NotImplementedError() + + def create_endpoint_response(self, request): + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/util.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/util.py new file mode 100644 index 0000000..d7bc5d9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/util.py @@ -0,0 +1,41 @@ +import base64 +import binascii +from urllib.parse import unquote +from authlib.common.encoding import to_unicode + + +def list_to_scope(scope): + """Convert a list of scopes to a space separated string.""" + if isinstance(scope, (set, tuple, list)): + return " ".join([to_unicode(s) for s in scope]) + if scope is None: + return scope + return to_unicode(scope) + + +def scope_to_list(scope): + """Convert a space separated string to a list of scopes.""" + if isinstance(scope, (tuple, list, set)): + return [to_unicode(s) for s in scope] + elif scope is None: + return None + return scope.strip().split() + + +def extract_basic_authorization(headers): + auth = headers.get('Authorization') + if not auth or ' ' not in auth: + return None, None + + auth_type, auth_token = auth.split(None, 1) + if auth_type.lower() != 'basic': + return None, None + + try: + query = to_unicode(base64.b64decode(auth_token)) + except (binascii.Error, TypeError): + return None, None + if ':' in query: + username, password = query.split(':', 1) + return unquote(username), unquote(password) + return query, None diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/wrappers.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/wrappers.py new file mode 100644 index 0000000..86d75bb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6749/wrappers.py @@ -0,0 +1,25 @@ +import time + + +class OAuth2Token(dict): + def __init__(self, params): + if params.get('expires_at'): + params['expires_at'] = int(params['expires_at']) + elif params.get('expires_in'): + params['expires_at'] = int(time.time()) + \ + int(params['expires_in']) + super().__init__(params) + + def is_expired(self, leeway=60): + expires_at = self.get('expires_at') + if not expires_at: + return None + # small timedelta to consider token as expired before it actually expires + expiration_threshold = expires_at - leeway + return expiration_threshold < time.time() + + @classmethod + def from_dict(cls, token): + if isinstance(token, dict) and not isinstance(token, cls): + token = cls(token) + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__init__.py new file mode 100644 index 0000000..ef3880b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__init__.py @@ -0,0 +1,26 @@ +""" + authlib.oauth2.rfc6750 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + The OAuth 2.0 Authorization Framework: Bearer Token Usage. 
+ + https://tools.ietf.org/html/rfc6750 +""" + +from .errors import InvalidTokenError, InsufficientScopeError +from .parameters import add_bearer_token +from .token import BearerTokenGenerator +from .validator import BearerTokenValidator + +# TODO: add deprecation +BearerToken = BearerTokenGenerator + + +__all__ = [ + 'InvalidTokenError', 'InsufficientScopeError', + 'add_bearer_token', + 'BearerToken', + 'BearerTokenGenerator', + 'BearerTokenValidator', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a28639d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..40c2737 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/parameters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/parameters.cpython-312.pyc new file mode 100644 index 0000000..f41d7fc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/parameters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/token.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/token.cpython-312.pyc new file mode 100644 index 0000000..c8fdbe8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/token.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/validator.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/validator.cpython-312.pyc new file mode 100644 index 0000000..f38c5d8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/__pycache__/validator.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/errors.py new file mode 100644 index 0000000..1be92a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/errors.py @@ -0,0 +1,80 @@ +""" + authlib.rfc6750.errors + ~~~~~~~~~~~~~~~~~~~~~~ + + OAuth Extensions Error Registration. When a request fails, + the resource server responds using the appropriate HTTP + status code and includes one of the following error codes + in the response. + + https://tools.ietf.org/html/rfc6750#section-6.2 + + :copyright: (c) 2017 by Hsiaoming Yang. +""" +from ..base import OAuth2Error + +__all__ = [ + 'InvalidTokenError', 'InsufficientScopeError' +] + + +class InvalidTokenError(OAuth2Error): + """The access token provided is expired, revoked, malformed, or + invalid for other reasons. The resource SHOULD respond with + the HTTP 401 (Unauthorized) status code. The client MAY + request a new access token and retry the protected resource + request. 
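+    A rejected request then receives a challenge like this example from
+    RFC 6750::
+
+        HTTP/1.1 401 Unauthorized
+        WWW-Authenticate: Bearer realm="example",
+                          error="invalid_token",
+                          error_description="The access token expired"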
+ + https://tools.ietf.org/html/rfc6750#section-3.1 + """ + error = 'invalid_token' + description = ( + 'The access token provided is expired, revoked, malformed, ' + 'or invalid for other reasons.' + ) + status_code = 401 + + def __init__(self, description=None, uri=None, status_code=None, + state=None, realm=None, **extra_attributes): + super().__init__( + description, uri, status_code, state) + self.realm = realm + self.extra_attributes = extra_attributes + + def get_headers(self): + """If the protected resource request does not include authentication + credentials or does not contain an access token that enables access + to the protected resource, the resource server MUST include the HTTP + "WWW-Authenticate" response header field; it MAY include it in + response to other conditions as well. + + https://tools.ietf.org/html/rfc6750#section-3 + """ + headers = super().get_headers() + + extras = [] + if self.realm: + extras.append(f'realm="{self.realm}"') + if self.extra_attributes: + extras.extend([f'{k}="{self.extra_attributes[k]}"' for k in self.extra_attributes]) + extras.append(f'error="{self.error}"') + error_description = self.get_error_description() + extras.append(f'error_description="{error_description}"') + headers.append( + ('WWW-Authenticate', 'Bearer ' + ', '.join(extras)) + ) + return headers + + +class InsufficientScopeError(OAuth2Error): + """The request requires higher privileges than provided by the + access token. The resource server SHOULD respond with the HTTP + 403 (Forbidden) status code and MAY include the "scope" + attribute with the scope necessary to access the protected + resource. + + https://tools.ietf.org/html/rfc6750#section-3.1 + """ + error = 'insufficient_scope' + description = 'The request requires higher privileges than provided by the access token.' + status_code = 403 diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/parameters.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/parameters.py new file mode 100644 index 0000000..8914a90 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/parameters.py @@ -0,0 +1,41 @@ +from authlib.common.urls import add_params_to_qs, add_params_to_uri + + +def add_to_uri(token, uri): + """Add a Bearer Token to the request URI. + Not recommended, use only if client can't use authorization header or body. + + http://www.example.com/path?access_token=h480djs93hd8 + """ + return add_params_to_uri(uri, [('access_token', token)]) + + +def add_to_headers(token, headers=None): + """Add a Bearer Token to the request URI. + Recommended method of passing bearer tokens. + + Authorization: Bearer h480djs93hd8 + """ + headers = headers or {} + headers['Authorization'] = f'Bearer {token}' + return headers + + +def add_to_body(token, body=None): + """Add a Bearer Token to the request body. 
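+    Only suitable when the body is ``application/x-www-form-urlencoded``;
+    the token is appended as a form field: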
+ + access_token=h480djs93hd8 + """ + if body is None: + body = '' + return add_params_to_qs(body, [('access_token', token)]) + + +def add_bearer_token(token, uri, headers, body, placement='header'): + if placement in ('uri', 'url', 'query'): + uri = add_to_uri(token, uri) + elif placement in ('header', 'headers'): + headers = add_to_headers(token, headers) + elif placement == 'body': + body = add_to_body(token, body) + return uri, headers, body diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/token.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/token.py new file mode 100644 index 0000000..1ab4dc5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/token.py @@ -0,0 +1,88 @@ +class BearerTokenGenerator: + """Bearer token generator which can create the payload for token response + by OAuth 2 server. A typical token response would be: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json;charset=UTF-8 + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"mF_9.B5f-4.1JqM", + "token_type":"Bearer", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA" + } + """ + + #: default expires_in value + DEFAULT_EXPIRES_IN = 3600 + #: default expires_in value differentiate by grant_type + GRANT_TYPES_EXPIRES_IN = { + 'authorization_code': 864000, + 'implicit': 3600, + 'password': 864000, + 'client_credentials': 864000 + } + + def __init__(self, access_token_generator, + refresh_token_generator=None, + expires_generator=None): + self.access_token_generator = access_token_generator + self.refresh_token_generator = refresh_token_generator + self.expires_generator = expires_generator + + def _get_expires_in(self, client, grant_type): + if self.expires_generator is None: + expires_in = self.GRANT_TYPES_EXPIRES_IN.get( + grant_type, self.DEFAULT_EXPIRES_IN) + elif callable(self.expires_generator): + expires_in = self.expires_generator(client, grant_type) + elif isinstance(self.expires_generator, int): + expires_in = self.expires_generator + else: + expires_in = self.DEFAULT_EXPIRES_IN + return expires_in + + @staticmethod + def get_allowed_scope(client, scope): + if scope: + scope = client.get_allowed_scope(scope) + return scope + + def generate(self, grant_type, client, user=None, scope=None, + expires_in=None, include_refresh_token=True): + """Generate a bearer token for OAuth 2.0 authorization token endpoint. + + :param client: the client that making the request. + :param grant_type: current requested grant_type. + :param user: current authorized user. + :param expires_in: if provided, use this value as expires_in. + :param scope: current requested scope. + :param include_refresh_token: should refresh_token be included. 
+ :return: Token dict + """ + scope = self.get_allowed_scope(client, scope) + access_token = self.access_token_generator( + client=client, grant_type=grant_type, user=user, scope=scope) + if expires_in is None: + expires_in = self._get_expires_in(client, grant_type) + + token = { + 'token_type': 'Bearer', + 'access_token': access_token, + } + if expires_in: + token['expires_in'] = expires_in + if include_refresh_token and self.refresh_token_generator: + token['refresh_token'] = self.refresh_token_generator( + client=client, grant_type=grant_type, user=user, scope=scope) + if scope: + token['scope'] = scope + return token + + def __call__(self, grant_type, client, user=None, scope=None, + expires_in=None, include_refresh_token=True): + return self.generate(grant_type, client, user, scope, expires_in, include_refresh_token) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/validator.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/validator.py new file mode 100644 index 0000000..d479014 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc6750/validator.py @@ -0,0 +1,39 @@ +""" + authlib.oauth2.rfc6750.validator + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Validate Bearer Token for in request, scope and token. +""" + +from ..rfc6749 import TokenValidator +from .errors import ( + InvalidTokenError, + InsufficientScopeError +) + + +class BearerTokenValidator(TokenValidator): + TOKEN_TYPE = 'bearer' + + def authenticate_token(self, token_string): + """A method to query token from database with the given token string. + Developers MUST re-implement this method. For instance:: + + def authenticate_token(self, token_string): + return get_token_from_database(token_string) + + :param token_string: A string to represent the access_token. + :return: token + """ + raise NotImplementedError() + + def validate_token(self, token, scopes, request): + """Check if token is active and matches the requested scopes.""" + if not token: + raise InvalidTokenError(realm=self.realm, extra_attributes=self.extra_attributes) + if token.is_expired(): + raise InvalidTokenError(realm=self.realm, extra_attributes=self.extra_attributes) + if token.is_revoked(): + raise InvalidTokenError(realm=self.realm, extra_attributes=self.extra_attributes) + if self.scope_insufficient(token.get_scope(), scopes): + raise InsufficientScopeError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__init__.py new file mode 100644 index 0000000..2b9c120 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__init__.py @@ -0,0 +1,14 @@ +""" + authlib.oauth2.rfc7009 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Token Revocation. 
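+    It provides ``prepare_revoke_token_request`` for building client-side
+    revocation requests and ``RevocationEndpoint`` for authorization servers.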
+ + https://tools.ietf.org/html/rfc7009 +""" + +from .parameters import prepare_revoke_token_request +from .revocation import RevocationEndpoint + +__all__ = ['prepare_revoke_token_request', 'RevocationEndpoint'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d0c3447 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/parameters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/parameters.cpython-312.pyc new file mode 100644 index 0000000..1668b74 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/parameters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/revocation.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/revocation.cpython-312.pyc new file mode 100644 index 0000000..b1c95ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/__pycache__/revocation.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/parameters.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/parameters.py new file mode 100644 index 0000000..2a829a7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/parameters.py @@ -0,0 +1,25 @@ +from authlib.common.urls import add_params_to_qs + + +def prepare_revoke_token_request(token, token_type_hint=None, + body=None, headers=None): + """Construct request body and headers for revocation endpoint. + + :param token: access_token or refresh_token string. + :param token_type_hint: Optional, `access_token` or `refresh_token`. + :param body: current request body. + :param headers: current request headers. + :return: tuple of (body, headers) + + https://tools.ietf.org/html/rfc7009#section-2.1 + """ + params = [('token', token)] + if token_type_hint: + params.append(('token_type_hint', token_type_hint)) + + body = add_params_to_qs(body or '', params) + if headers is None: + headers = {} + + headers['Content-Type'] = 'application/x-www-form-urlencoded' + return body, headers diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/revocation.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/revocation.py new file mode 100644 index 0000000..816e5f4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7009/revocation.py @@ -0,0 +1,109 @@ +from authlib.consts import default_json_headers +from ..rfc6749 import TokenEndpoint, InvalidGrantError +from ..rfc6749 import ( + InvalidRequestError, + UnsupportedTokenTypeError, +) + + +class RevocationEndpoint(TokenEndpoint): + """Implementation of revocation endpoint which is described in + `RFC7009`_. + + .. _RFC7009: https://tools.ietf.org/html/rfc7009 + """ + #: Endpoint name to be registered + ENDPOINT_NAME = 'revocation' + + def authenticate_token(self, request, client): + """The client constructs the request by including the following + parameters using the "application/x-www-form-urlencoded" format in + the HTTP request entity-body: + + token + REQUIRED. The token that the client wants to get revoked. + + token_type_hint + OPTIONAL. 
A hint about the type of the token submitted for + revocation. + """ + self.check_params(request, client) + token = self.query_token(request.form['token'], request.form.get('token_type_hint')) + if token and not token.check_client(client): + raise InvalidGrantError() + return token + + def check_params(self, request, client): + if 'token' not in request.form: + raise InvalidRequestError() + + hint = request.form.get('token_type_hint') + if hint and hint not in self.SUPPORTED_TOKEN_TYPES: + raise UnsupportedTokenTypeError() + + def create_endpoint_response(self, request): + """Validate revocation request and create the response for revocation. + For example, a client may request the revocation of a refresh token + with the following request:: + + POST /revoke HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + token=45ghiukldjahdnhzdauz&token_type_hint=refresh_token + + :returns: (status_code, body, headers) + """ + # The authorization server first validates the client credentials + client = self.authenticate_endpoint_client(request) + + # then verifies whether the token was issued to the client making + # the revocation request + token = self.authenticate_token(request, client) + + # the authorization server invalidates the token + if token: + self.revoke_token(token, request) + self.server.send_signal( + 'after_revoke_token', + token=token, + client=client, + ) + return 200, {}, default_json_headers + + def query_token(self, token_string, token_type_hint): + """Get the token from database/storage by the given token string. + Developers should implement this method:: + + def query_token(self, token_string, token_type_hint): + if token_type_hint == 'access_token': + return Token.query_by_access_token(token_string) + if token_type_hint == 'refresh_token': + return Token.query_by_refresh_token(token_string) + return Token.query_by_access_token(token_string) or \ + Token.query_by_refresh_token(token_string) + """ + raise NotImplementedError() + + def revoke_token(self, token, request): + """Mark token as revoked. Since token MUST be unique, it would be + dangerous to delete it. Consider this situation: + + 1. Jane obtained a token XYZ + 2. Jane revoked (deleted) token XYZ + 3. Bob generated a new token XYZ + 4. 
Jane can use XYZ to access Bob's resource + + It would be secure to mark a token as revoked:: + + def revoke_token(self, token, request): + hint = request.form.get('token_type_hint') + if hint == 'access_token': + token.access_token_revoked = True + else: + token.access_token_revoked = True + token.refresh_token_revoked = True + token.save() + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__init__.py new file mode 100644 index 0000000..0dbe0b3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__init__.py @@ -0,0 +1,3 @@ +from .client import AssertionClient + +__all__ = ['AssertionClient'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1ba7435 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..3e71820 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/__pycache__/client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/client.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/client.py new file mode 100644 index 0000000..cf43104 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7521/client.py @@ -0,0 +1,92 @@ +from authlib.common.encoding import to_native +from authlib.oauth2.base import OAuth2Error + + +class AssertionClient: + """Constructs a new Assertion Framework for OAuth 2.0 Authorization Grants + per RFC7521_. + + .. _RFC7521: https://tools.ietf.org/html/rfc7521 + """ + DEFAULT_GRANT_TYPE = None + ASSERTION_METHODS = {} + token_auth_class = None + oauth_error_class = OAuth2Error + + def __init__(self, session, token_endpoint, issuer, subject, + audience=None, grant_type=None, claims=None, + token_placement='header', scope=None, leeway=60, **kwargs): + + self.session = session + + if audience is None: + audience = token_endpoint + + self.token_endpoint = token_endpoint + + if grant_type is None: + grant_type = self.DEFAULT_GRANT_TYPE + + self.grant_type = grant_type + + # https://tools.ietf.org/html/rfc7521#section-5.1 + self.issuer = issuer + self.subject = subject + self.audience = audience + self.claims = claims + self.scope = scope + if self.token_auth_class is not None: + self.token_auth = self.token_auth_class(None, token_placement, self) + self._kwargs = kwargs + self.leeway = leeway + + @property + def token(self): + return self.token_auth.token + + @token.setter + def token(self, token): + self.token_auth.set_token(token) + + def refresh_token(self): + """Using Assertions as Authorization Grants to refresh token as + described in `Section 4.1`_. + + .. 
_`Section 4.1`: https://tools.ietf.org/html/rfc7521#section-4.1 + """ + generate_assertion = self.ASSERTION_METHODS[self.grant_type] + assertion = generate_assertion( + issuer=self.issuer, + subject=self.subject, + audience=self.audience, + claims=self.claims, + **self._kwargs + ) + data = { + 'assertion': to_native(assertion), + 'grant_type': self.grant_type, + } + if self.scope: + data['scope'] = self.scope + + return self._refresh_token(data) + + def parse_response_token(self, resp): + if resp.status_code >= 500: + resp.raise_for_status() + + token = resp.json() + if 'error' in token: + raise self.oauth_error_class( + error=token['error'], + description=token.get('error_description') + ) + + self.token = token + return self.token + + def _refresh_token(self, data): + resp = self.session.request( + 'POST', self.token_endpoint, data=data, withhold_token=True) + + return self.parse_response_token(resp) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__init__.py new file mode 100644 index 0000000..ec9d3d3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__init__.py @@ -0,0 +1,37 @@ +""" + authlib.oauth2.rfc7523 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Token (JWT) Profile for OAuth 2.0 Client + Authentication and Authorization Grants. + + https://tools.ietf.org/html/rfc7523 +""" + +from .jwt_bearer import JWTBearerGrant +from .client import ( + JWTBearerClientAssertion, +) +from .assertion import ( + client_secret_jwt_sign, + private_key_jwt_sign, +) +from .auth import ( + ClientSecretJWT, PrivateKeyJWT, +) +from .token import JWTBearerTokenGenerator +from .validator import JWTBearerToken, JWTBearerTokenValidator + +__all__ = [ + 'JWTBearerGrant', + 'JWTBearerClientAssertion', + 'client_secret_jwt_sign', + 'private_key_jwt_sign', + 'ClientSecretJWT', + 'PrivateKeyJWT', + + 'JWTBearerToken', + 'JWTBearerTokenGenerator', + 'JWTBearerTokenValidator', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d1fd36b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/assertion.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/assertion.cpython-312.pyc new file mode 100644 index 0000000..dcf9386 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/assertion.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/auth.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..d64161b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/auth.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..9cf27fc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/client.cpython-312.pyc 
differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/jwt_bearer.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/jwt_bearer.cpython-312.pyc new file mode 100644 index 0000000..050cb66 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/jwt_bearer.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/token.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/token.cpython-312.pyc new file mode 100644 index 0000000..8898bbc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/token.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/validator.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/validator.cpython-312.pyc new file mode 100644 index 0000000..ea96d24 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/__pycache__/validator.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/assertion.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/assertion.py new file mode 100644 index 0000000..0bb9fe7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/assertion.py @@ -0,0 +1,66 @@ +import time +from authlib.jose import jwt +from authlib.common.security import generate_token + + +def sign_jwt_bearer_assertion( + key, issuer, audience, subject=None, issued_at=None, + expires_at=None, claims=None, header=None, **kwargs): + + if header is None: + header = {} + alg = kwargs.pop('alg', None) + if alg: + header['alg'] = alg + if 'alg' not in header: + raise ValueError('Missing "alg" in header') + + payload = {'iss': issuer, 'aud': audience} + + # subject is not required in Google service + if subject: + payload['sub'] = subject + + if not issued_at: + issued_at = int(time.time()) + + expires_in = kwargs.pop('expires_in', 3600) + if not expires_at: + expires_at = issued_at + expires_in + + payload['iat'] = issued_at + payload['exp'] = expires_at + + if claims: + payload.update(claims) + + return jwt.encode(header, payload, key) + + +def client_secret_jwt_sign(client_secret, client_id, token_endpoint, alg='HS256', + claims=None, **kwargs): + return _sign(client_secret, client_id, token_endpoint, alg, claims, **kwargs) + + +def private_key_jwt_sign(private_key, client_id, token_endpoint, alg='RS256', + claims=None, **kwargs): + return _sign(private_key, client_id, token_endpoint, alg, claims, **kwargs) + + +def _sign(key, client_id, token_endpoint, alg, claims=None, **kwargs): + # REQUIRED. Issuer. This MUST contain the client_id of the OAuth Client. + issuer = client_id + # REQUIRED. Subject. This MUST contain the client_id of the OAuth Client. + subject = client_id + # The Audience SHOULD be the URL of the Authorization Server's Token Endpoint. 
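+    # (Per RFC 7523 section 3, "aud" must identify the authorization server
+    # as an intended audience; the token endpoint URL is an accepted value.)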
+ audience = token_endpoint + + # jti is required + if claims is None: + claims = {} + if 'jti' not in claims: + claims['jti'] = generate_token(36) + + return sign_jwt_bearer_assertion( + key=key, issuer=issuer, audience=audience, subject=subject, + claims=claims, alg=alg, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/auth.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/auth.py new file mode 100644 index 0000000..7764466 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/auth.py @@ -0,0 +1,94 @@ +from authlib.common.urls import add_params_to_qs +from .assertion import client_secret_jwt_sign, private_key_jwt_sign +from .client import ASSERTION_TYPE + + +class ClientSecretJWT: + """Authentication method for OAuth 2.0 Client. This authentication + method is called ``client_secret_jwt``, which is using ``client_id`` + and ``client_secret`` constructed with JWT to identify a client. + + Here is an example of use ``client_secret_jwt`` with Requests Session:: + + from authlib.integrations.requests_client import OAuth2Session + + token_endpoint = 'https://example.com/oauth/token' + session = OAuth2Session( + 'your-client-id', 'your-client-secret', + token_endpoint_auth_method='client_secret_jwt' + ) + session.register_client_auth_method(ClientSecretJWT(token_endpoint)) + session.fetch_token(token_endpoint) + + :param token_endpoint: A string URL of the token endpoint + :param claims: Extra JWT claims + :param headers: Extra JWT headers + :param alg: ``alg`` value, default is HS256 + """ + name = 'client_secret_jwt' + alg = 'HS256' + + def __init__(self, token_endpoint=None, claims=None, headers=None, alg=None): + self.token_endpoint = token_endpoint + self.claims = claims + self.headers = headers + if alg is not None: + self.alg = alg + + def sign(self, auth, token_endpoint): + return client_secret_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header=self.headers, + alg=self.alg, + ) + + def __call__(self, auth, method, uri, headers, body): + token_endpoint = self.token_endpoint + if not token_endpoint: + token_endpoint = uri + + client_assertion = self.sign(auth, token_endpoint) + body = add_params_to_qs(body or '', [ + ('client_assertion_type', ASSERTION_TYPE), + ('client_assertion', client_assertion) + ]) + return uri, headers, body + + +class PrivateKeyJWT(ClientSecretJWT): + """Authentication method for OAuth 2.0 Client. This authentication + method is called ``private_key_jwt``, which is using ``client_id`` + and ``private_key`` constructed with JWT to identify a client. 
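+    Note that the private key takes the place of ``client_secret`` on the
+    session; the ``sign`` method below reads it from ``auth.client_secret``.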
+ + Here is an example of use ``private_key_jwt`` with Requests Session:: + + from authlib.integrations.requests_client import OAuth2Session + + token_endpoint = 'https://example.com/oauth/token' + session = OAuth2Session( + 'your-client-id', 'your-client-private-key', + token_endpoint_auth_method='private_key_jwt' + ) + session.register_client_auth_method(PrivateKeyJWT(token_endpoint)) + session.fetch_token(token_endpoint) + + :param token_endpoint: A string URL of the token endpoint + :param claims: Extra JWT claims + :param headers: Extra JWT headers + :param alg: ``alg`` value, default is RS256 + """ + name = 'private_key_jwt' + alg = 'RS256' + + def sign(self, auth, token_endpoint): + return private_key_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header=self.headers, + alg=self.alg, + ) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/client.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/client.py new file mode 100644 index 0000000..2a6a1bf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/client.py @@ -0,0 +1,113 @@ +import logging +from authlib.jose import jwt +from authlib.jose.errors import JoseError +from ..rfc6749 import InvalidClientError + +ASSERTION_TYPE = 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer' +log = logging.getLogger(__name__) + + +class JWTBearerClientAssertion: + """Implementation of Using JWTs for Client Authentication, which is + defined by RFC7523. + """ + #: Value of ``client_assertion_type`` of JWTs + CLIENT_ASSERTION_TYPE = ASSERTION_TYPE + #: Name of the client authentication method + CLIENT_AUTH_METHOD = 'client_assertion_jwt' + + def __init__(self, token_url, validate_jti=True): + self.token_url = token_url + self._validate_jti = validate_jti + + def __call__(self, query_client, request): + data = request.form + assertion_type = data.get('client_assertion_type') + assertion = data.get('client_assertion') + if assertion_type == ASSERTION_TYPE and assertion: + resolve_key = self.create_resolve_key_func(query_client, request) + self.process_assertion_claims(assertion, resolve_key) + return self.authenticate_client(request.client) + log.debug('Authenticate via %r failed', self.CLIENT_AUTH_METHOD) + + def create_claims_options(self): + """Create a claims_options for verify JWT payload claims. Developers + MAY overwrite this method to create a more strict options.""" + # https://tools.ietf.org/html/rfc7523#section-3 + # The Audience SHOULD be the URL of the Authorization Server's Token Endpoint + options = { + 'iss': {'essential': True, 'validate': _validate_iss}, + 'sub': {'essential': True}, + 'aud': {'essential': True, 'value': self.token_url}, + 'exp': {'essential': True}, + } + if self._validate_jti: + options['jti'] = {'essential': True, 'validate': self.validate_jti} + return options + + def process_assertion_claims(self, assertion, resolve_key): + """Extract JWT payload claims from request "assertion", per + `Section 3.1`_. + + :param assertion: assertion string value in the request + :param resolve_key: function to resolve the sign key + :return: JWTClaims + :raise: InvalidClientError + + .. 
_`Section 3.1`: https://tools.ietf.org/html/rfc7523#section-3.1 + """ + try: + claims = jwt.decode( + assertion, resolve_key, + claims_options=self.create_claims_options() + ) + claims.validate() + except JoseError as e: + log.debug('Assertion Error: %r', e) + raise InvalidClientError() + return claims + + def authenticate_client(self, client): + if client.check_endpoint_auth_method(self.CLIENT_AUTH_METHOD, 'token'): + return client + raise InvalidClientError() + + def create_resolve_key_func(self, query_client, request): + def resolve_key(headers, payload): + # https://tools.ietf.org/html/rfc7523#section-3 + # For client authentication, the subject MUST be the + # "client_id" of the OAuth client + client_id = payload['sub'] + client = query_client(client_id) + if not client: + raise InvalidClientError() + request.client = client + return self.resolve_client_public_key(client, headers) + return resolve_key + + def validate_jti(self, claims, jti): + """Validate if the given ``jti`` value is used before. Developers + MUST implement this method:: + + def validate_jti(self, claims, jti): + key = 'jti:{}-{}'.format(claims['sub'], jti) + if redis.get(key): + return False + redis.set(key, 1, ex=3600) + return True + """ + raise NotImplementedError() + + def resolve_client_public_key(self, client, headers): + """Resolve the client public key for verifying the JWT signature. + A client may have many public keys, in this case, we can retrieve it + via ``kid`` value in headers. Developers MUST implement this method:: + + def resolve_client_public_key(self, client, headers): + return client.public_key + """ + raise NotImplementedError() + + +def _validate_iss(claims, iss): + return claims['sub'] == iss diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py new file mode 100644 index 0000000..fb672a9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py @@ -0,0 +1,182 @@ +import logging +from authlib.jose import jwt, JoseError +from ..rfc6749 import BaseGrant, TokenEndpointMixin +from ..rfc6749 import ( + UnauthorizedClientError, + InvalidRequestError, + InvalidGrantError, + InvalidClientError, +) +from .assertion import sign_jwt_bearer_assertion + +log = logging.getLogger(__name__) +JWT_BEARER_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer' + + +class JWTBearerGrant(BaseGrant, TokenEndpointMixin): + GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + #: Options for verifying JWT payload claims. Developers MAY + #: overwrite this constant to create a more strict options. + CLAIMS_OPTIONS = { + 'iss': {'essential': True}, + 'aud': {'essential': True}, + 'exp': {'essential': True}, + } + + @staticmethod + def sign(key, issuer, audience, subject=None, + issued_at=None, expires_at=None, claims=None, **kwargs): + return sign_jwt_bearer_assertion( + key, issuer, audience, subject, issued_at, + expires_at, claims, **kwargs) + + def process_assertion_claims(self, assertion): + """Extract JWT payload claims from request "assertion", per + `Section 3.1`_. + + :param assertion: assertion string value in the request + :return: JWTClaims + :raise: InvalidGrantError + + .. 
_`Section 3.1`: https://tools.ietf.org/html/rfc7523#section-3.1 + """ + try: + claims = jwt.decode( + assertion, self.resolve_public_key, + claims_options=self.CLAIMS_OPTIONS) + claims.validate() + except JoseError as e: + log.debug('Assertion Error: %r', e) + raise InvalidGrantError(description=e.description) + return claims + + def resolve_public_key(self, headers, payload): + client = self.resolve_issuer_client(payload['iss']) + return self.resolve_client_key(client, headers, payload) + + def validate_token_request(self): + """The client makes a request to the token endpoint by sending the + following parameters using the "application/x-www-form-urlencoded" + format per `Section 2.1`_: + + grant_type + REQUIRED. Value MUST be set to + "urn:ietf:params:oauth:grant-type:jwt-bearer". + + assertion + REQUIRED. Value MUST contain a single JWT. + + scope + OPTIONAL. + + The following example demonstrates an access token request with a JWT + as an authorization grant: + + .. code-block:: http + + POST /token.oauth2 HTTP/1.1 + Host: as.example.com + Content-Type: application/x-www-form-urlencoded + + grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer + &assertion=eyJhbGciOiJFUzI1NiIsImtpZCI6IjE2In0. + eyJpc3Mi[...omitted for brevity...]. + J9l-ZhwP[...omitted for brevity...] + + .. _`Section 2.1`: https://tools.ietf.org/html/rfc7523#section-2.1 + """ + assertion = self.request.form.get('assertion') + if not assertion: + raise InvalidRequestError('Missing "assertion" in request') + + claims = self.process_assertion_claims(assertion) + client = self.resolve_issuer_client(claims['iss']) + log.debug('Validate token request of %s', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + self.request.client = client + self.validate_requested_scope() + + subject = claims.get('sub') + if subject: + user = self.authenticate_user(subject) + if not user: + raise InvalidGrantError(description='Invalid "sub" value in assertion') + + log.debug('Check client(%s) permission to User(%s)', client, user) + if not self.has_granted_permission(client, user): + raise InvalidClientError( + description='Client has no permission to access user data') + self.request.user = user + + def create_token_response(self): + """If valid and authorized, the authorization server issues an access + token. + """ + token = self.generate_token( + scope=self.request.scope, + user=self.request.user, + include_refresh_token=False, + ) + log.debug('Issue token %r to %r', token, self.request.client) + self.save_token(token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def resolve_issuer_client(self, issuer): + """Fetch client via "iss" in assertion claims. Developers MUST + implement this method in subclass, e.g.:: + + def resolve_issuer_client(self, issuer): + return Client.query_by_iss(issuer) + + :param issuer: "iss" value in assertion + :return: Client instance + """ + raise NotImplementedError() + + def resolve_client_key(self, client, headers, payload): + """Resolve client key to decode assertion data. Developers MUST + implement this method in subclass. 
For instance, there is a + "jwks" column on client table, e.g.:: + + def resolve_client_key(self, client, headers, payload): + # from authlib.jose import JsonWebKey + + key_set = JsonWebKey.import_key_set(client.jwks) + return key_set.find_by_kid(headers['kid']) + + :param client: instance of OAuth client model + :param headers: headers part of the JWT + :param payload: payload part of the JWT + :return: ``authlib.jose.Key`` instance + """ + raise NotImplementedError() + + def authenticate_user(self, subject): + """Authenticate user with the given assertion claims. Developers MUST + implement it in subclass, e.g.:: + + def authenticate_user(self, subject): + return User.get_by_sub(subject) + + :param subject: "sub" value in claims + :return: User instance + """ + raise NotImplementedError() + + def has_granted_permission(self, client, user): + """Check if the client has permission to access the given user's resource. + Developers MUST implement it in subclass, e.g.:: + + def has_granted_permission(self, client, user): + permission = ClientUserGrant.query(client=client, user=user) + return permission.granted + + :param client: instance of OAuth client model + :param user: instance of User model + :return: bool + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/token.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/token.py new file mode 100644 index 0000000..e598d73 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/token.py @@ -0,0 +1,93 @@ +import time +from authlib.common.encoding import to_native +from authlib.jose import jwt + + +class JWTBearerTokenGenerator: + """A JSON Web Token formatted bearer token generator for jwt-bearer grant type. + This token generator can be registered into authorization server:: + + authorization_server.register_token_generator( + 'urn:ietf:params:oauth:grant-type:jwt-bearer', + JWTBearerTokenGenerator(private_rsa_key), + ) + + In this way, we can generate the token into JWT format. And we don't have to + save this token into database, since it will be short time valid. Consider to + rewrite ``JWTBearerGrant.save_token``:: + + class MyJWTBearerGrant(JWTBearerGrant): + def save_token(self, token): + pass + + :param secret_key: private RSA key in bytes, JWK or JWK Set. + :param issuer: a string or URI of the issuer + :param alg: ``alg`` to use in JWT + """ + DEFAULT_EXPIRES_IN = 3600 + + def __init__(self, secret_key, issuer=None, alg='RS256'): + self.secret_key = secret_key + self.issuer = issuer + self.alg = alg + + @staticmethod + def get_allowed_scope(client, scope): + if scope: + scope = client.get_allowed_scope(scope) + return scope + + @staticmethod + def get_sub_value(user): + """Return user's ID as ``sub`` value in token payload. For instance:: + + @staticmethod + def get_sub_value(user): + return str(user.id) + """ + return user.get_user_id() + + def get_token_data(self, grant_type, client, expires_in, user=None, scope=None): + scope = self.get_allowed_scope(client, scope) + issued_at = int(time.time()) + data = { + 'scope': scope, + 'grant_type': grant_type, + 'iat': issued_at, + 'exp': issued_at + expires_in, + 'client_id': client.get_client_id(), + } + if self.issuer: + data['iss'] = self.issuer + if user: + data['sub'] = self.get_sub_value(user) + return data + + def generate(self, grant_type, client, user=None, scope=None, expires_in=None): + """Generate a bearer token for OAuth 2.0 authorization token endpoint. 
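Taken together, the four required overrides of ``JWTBearerGrant`` form one small subclass. A sketch that simply reuses the per-method examples from the docstrings above, where ``Client``, ``User`` and ``ClientUserGrant`` are hypothetical models and ``authorization_server`` is an already-configured Authlib server::

    from authlib.jose import JsonWebKey
    from authlib.oauth2.rfc7523 import JWTBearerGrant

    class MyJWTBearerGrant(JWTBearerGrant):
        def resolve_issuer_client(self, issuer):
            return Client.query_by_iss(issuer)

        def resolve_client_key(self, client, headers, payload):
            # clients keep their public keys as a JWK Set in a "jwks" column
            key_set = JsonWebKey.import_key_set(client.jwks)
            return key_set.find_by_kid(headers['kid'])

        def authenticate_user(self, subject):
            return User.get_by_sub(subject)

        def has_granted_permission(self, client, user):
            permission = ClientUserGrant.query(client=client, user=user)
            return permission.granted

    authorization_server.register_grant(MyJWTBearerGrant)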
+ + :param client: the client that making the request. + :param grant_type: current requested grant_type. + :param user: current authorized user. + :param expires_in: if provided, use this value as expires_in. + :param scope: current requested scope. + :return: Token dict + """ + if expires_in is None: + expires_in = self.DEFAULT_EXPIRES_IN + + token_data = self.get_token_data(grant_type, client, expires_in, user, scope) + access_token = jwt.encode({'alg': self.alg}, token_data, key=self.secret_key, check=False) + token = { + 'token_type': 'Bearer', + 'access_token': to_native(access_token), + 'expires_in': expires_in + } + if scope: + token['scope'] = scope + return token + + def __call__(self, grant_type, client, user=None, scope=None, + expires_in=None, include_refresh_token=True): + # there is absolutely no refresh token in JWT format + return self.generate(grant_type, client, user, scope, expires_in) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/validator.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/validator.py new file mode 100644 index 0000000..f2423b8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7523/validator.py @@ -0,0 +1,54 @@ +import time +import logging +from authlib.jose import jwt, JoseError, JWTClaims +from ..rfc6749 import TokenMixin +from ..rfc6750 import BearerTokenValidator + +logger = logging.getLogger(__name__) + + +class JWTBearerToken(TokenMixin, JWTClaims): + def check_client(self, client): + return self['client_id'] == client.get_client_id() + + def get_scope(self): + return self.get('scope') + + def get_expires_in(self): + return self['exp'] - self['iat'] + + def is_expired(self): + return self['exp'] < time.time() + + def is_revoked(self): + return False + + +class JWTBearerTokenValidator(BearerTokenValidator): + TOKEN_TYPE = 'bearer' + token_cls = JWTBearerToken + + def __init__(self, public_key, issuer=None, realm=None, **extra_attributes): + super().__init__(realm, **extra_attributes) + self.public_key = public_key + claims_options = { + 'exp': {'essential': True}, + 'client_id': {'essential': True}, + 'grant_type': {'essential': True}, + } + if issuer: + claims_options['iss'] = {'essential': True, 'value': issuer} + self.claims_options = claims_options + + def authenticate_token(self, token_string): + try: + claims = jwt.decode( + token_string, self.public_key, + claims_options=self.claims_options, + claims_cls=self.token_cls, + ) + claims.validate() + return claims + except JoseError as error: + logger.debug('Authenticate token failed. %r', error) + return None diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__init__.py new file mode 100644 index 0000000..8ebb070 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__init__.py @@ -0,0 +1,25 @@ +""" + authlib.oauth2.rfc7591 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Dynamic Client Registration Protocol. 
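On the resource-server side, tokens minted by ``JWTBearerTokenGenerator`` can be verified locally with the ``JWTBearerTokenValidator`` from ``validator.py`` above. A sketch, assuming a Flask ``app``, Authlib's Flask integration, and ``public_rsa_key`` holding the public half of the generator's signing key::

    from authlib.integrations.flask_oauth2 import ResourceProtector
    from authlib.oauth2.rfc7523.validator import JWTBearerTokenValidator

    require_oauth = ResourceProtector()
    # passing issuer pins the "iss" claim to an exact value
    require_oauth.register_token_validator(
        JWTBearerTokenValidator(public_rsa_key, issuer='https://as.example.com')
    )

    @app.route('/user/me')
    @require_oauth('profile')
    def me():
        ...

Because the claims are validated with the public key alone, no token lookup is needed, which is why the generator's docstring suggests a no-op ``save_token``.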
+ + https://tools.ietf.org/html/rfc7591 +""" + + +from .claims import ClientMetadataClaims +from .endpoint import ClientRegistrationEndpoint +from .errors import ( + InvalidRedirectURIError, + InvalidClientMetadataError, + InvalidSoftwareStatementError, + UnapprovedSoftwareStatementError, +) + +__all__ = [ + 'ClientMetadataClaims', 'ClientRegistrationEndpoint', + 'InvalidRedirectURIError', 'InvalidClientMetadataError', + 'InvalidSoftwareStatementError', 'UnapprovedSoftwareStatementError', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0d6e174 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/claims.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/claims.cpython-312.pyc new file mode 100644 index 0000000..e44b800 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/claims.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/endpoint.cpython-312.pyc new file mode 100644 index 0000000..21d906e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..8f4306d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/claims.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/claims.py new file mode 100644 index 0000000..b6157b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/claims.py @@ -0,0 +1,218 @@ +from authlib.jose import BaseClaims, JsonWebKey +from authlib.jose.errors import InvalidClaimError +from authlib.common.urls import is_valid_url + + +class ClientMetadataClaims(BaseClaims): + # https://tools.ietf.org/html/rfc7591#section-2 + REGISTERED_CLAIMS = [ + 'redirect_uris', + 'token_endpoint_auth_method', + 'grant_types', + 'response_types', + 'client_name', + 'client_uri', + 'logo_uri', + 'scope', + 'contacts', + 'tos_uri', + 'policy_uri', + 'jwks_uri', + 'jwks', + 'software_id', + 'software_version', + ] + + def validate(self): + self._validate_essential_claims() + self.validate_redirect_uris() + self.validate_token_endpoint_auth_method() + self.validate_grant_types() + self.validate_response_types() + self.validate_client_name() + self.validate_client_uri() + self.validate_logo_uri() + self.validate_scope() + self.validate_contacts() + self.validate_tos_uri() + self.validate_policy_uri() + self.validate_jwks_uri() + self.validate_jwks() + self.validate_software_id() + self.validate_software_version() + + def validate_redirect_uris(self): + """Array of redirection URI strings for use in redirect-based flows + such as the authorization code and implicit flows. 
As required by + Section 2 of OAuth 2.0 [RFC6749], clients using flows with + redirection MUST register their redirection URI values. + Authorization servers that support dynamic registration for + redirect-based flows MUST implement support for this metadata + value. + """ + uris = self.get('redirect_uris') + if uris: + for uri in uris: + self._validate_uri('redirect_uris', uri) + + def validate_token_endpoint_auth_method(self): + """String indicator of the requested authentication method for the + token endpoint. + """ + # If unspecified or omitted, the default is "client_secret_basic" + if 'token_endpoint_auth_method' not in self: + self['token_endpoint_auth_method'] = 'client_secret_basic' + self._validate_claim_value('token_endpoint_auth_method') + + def validate_grant_types(self): + """Array of OAuth 2.0 grant type strings that the client can use at + the token endpoint. + """ + self._validate_claim_value('grant_types') + + def validate_response_types(self): + """Array of the OAuth 2.0 response type strings that the client can + use at the authorization endpoint. + """ + self._validate_claim_value('response_types') + + def validate_client_name(self): + """Human-readable string name of the client to be presented to the + end-user during authorization. If omitted, the authorization + server MAY display the raw "client_id" value to the end-user + instead. It is RECOMMENDED that clients always send this field. + The value of this field MAY be internationalized, as described in + Section 2.2. + """ + + def validate_client_uri(self): + """URL string of a web page providing information about the client. + If present, the server SHOULD display this URL to the end-user in + a clickable fashion. It is RECOMMENDED that clients always send + this field. The value of this field MUST point to a valid web + page. The value of this field MAY be internationalized, as + described in Section 2.2. + """ + self._validate_uri('client_uri') + + def validate_logo_uri(self): + """URL string that references a logo for the client. If present, the + server SHOULD display this image to the end-user during approval. + The value of this field MUST point to a valid image file. The + value of this field MAY be internationalized, as described in + Section 2.2. + """ + self._validate_uri('logo_uri') + + def validate_scope(self): + """String containing a space-separated list of scope values (as + described in Section 3.3 of OAuth 2.0 [RFC6749]) that the client + can use when requesting access tokens. The semantics of values in + this list are service specific. If omitted, an authorization + server MAY register a client with a default set of scopes. + """ + self._validate_claim_value('scope') + + def validate_contacts(self): + """Array of strings representing ways to contact people responsible + for this client, typically email addresses. The authorization + server MAY make these contact addresses available to end-users for + support requests for the client. See Section 6 for information on + Privacy Considerations. + """ + if 'contacts' in self and not isinstance(self['contacts'], list): + raise InvalidClaimError('contacts') + + def validate_tos_uri(self): + """URL string that points to a human-readable terms of service + document for the client that describes a contractual relationship + between the end-user and the client that the end-user accepts when + authorizing the client. The authorization server SHOULD display + this URL to the end-user if it is provided. 
The value of this + field MUST point to a valid web page. The value of this field MAY + be internationalized, as described in Section 2.2. + """ + self._validate_uri('tos_uri') + + def validate_policy_uri(self): + """URL string that points to a human-readable privacy policy document + that describes how the deployment organization collects, uses, + retains, and discloses personal data. The authorization server + SHOULD display this URL to the end-user if it is provided. The + value of this field MUST point to a valid web page. The value of + this field MAY be internationalized, as described in Section 2.2. + """ + self._validate_uri('policy_uri') + + def validate_jwks_uri(self): + """URL string referencing the client's JSON Web Key (JWK) Set + [RFC7517] document, which contains the client's public keys. The + value of this field MUST point to a valid JWK Set document. These + keys can be used by higher-level protocols that use signing or + encryption. For instance, these keys might be used by some + applications for validating signed requests made to the token + endpoint when using JWTs for client authentication [RFC7523]. Use + of this parameter is preferred over the "jwks" parameter, as it + allows for easier key rotation. The "jwks_uri" and "jwks" + parameters MUST NOT both be present in the same request or + response. + """ + # TODO: use real HTTP library + self._validate_uri('jwks_uri') + + def validate_jwks(self): + """Client's JSON Web Key Set [RFC7517] document value, which contains + the client's public keys. The value of this field MUST be a JSON + object containing a valid JWK Set. These keys can be used by + higher-level protocols that use signing or encryption. This + parameter is intended to be used by clients that cannot use the + "jwks_uri" parameter, such as native clients that cannot host + public URLs. The "jwks_uri" and "jwks" parameters MUST NOT both + be present in the same request or response. + """ + if 'jwks' in self: + if 'jwks_uri' in self: + # The "jwks_uri" and "jwks" parameters MUST NOT both be present + raise InvalidClaimError('jwks') + + jwks = self['jwks'] + try: + key_set = JsonWebKey.import_key_set(jwks) + if not key_set: + raise InvalidClaimError('jwks') + except ValueError: + raise InvalidClaimError('jwks') + + def validate_software_id(self): + """A unique identifier string (e.g., a Universally Unique Identifier + (UUID)) assigned by the client developer or software publisher + used by registration endpoints to identify the client software to + be dynamically registered. Unlike "client_id", which is issued by + the authorization server and SHOULD vary between instances, the + "software_id" SHOULD remain the same for all instances of the + client software. The "software_id" SHOULD remain the same across + multiple updates or versions of the same piece of software. The + value of this field is not intended to be human readable and is + usually opaque to the client and authorization server. + """ + + def validate_software_version(self): + """A version identifier string for the client software identified by + "software_id". The value of the "software_version" SHOULD change + on any update to the client software identified by the same + "software_id". The value of this field is intended to be compared + using string equality matching and no other comparison semantics + are defined by this specification. 
The value of this field is + outside the scope of this specification, but it is not intended to + be human readable and is usually opaque to the client and + authorization server. The definition of what constitutes an + update to client software that would trigger a change to this + value is specific to the software itself and is outside the scope + of this specification. + """ + + def _validate_uri(self, key, uri=None): + if uri is None: + uri = self.get(key) + if uri and not is_valid_url(uri): + raise InvalidClaimError(key) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/endpoint.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/endpoint.py new file mode 100644 index 0000000..d26e061 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/endpoint.py @@ -0,0 +1,211 @@ +import os +import time +import binascii +from authlib.consts import default_json_headers +from authlib.common.security import generate_token +from authlib.jose import JsonWebToken, JoseError +from ..rfc6749 import AccessDeniedError, InvalidRequestError +from ..rfc6749 import scope_to_list +from .claims import ClientMetadataClaims +from .errors import ( + InvalidClientMetadataError, + UnapprovedSoftwareStatementError, + InvalidSoftwareStatementError, +) + + +class ClientRegistrationEndpoint: + """The client registration endpoint is an OAuth 2.0 endpoint designed to + allow a client to be registered with the authorization server. + """ + ENDPOINT_NAME = 'client_registration' + + #: The claims validation class + claims_class = ClientMetadataClaims + + #: Rewrite this value with a list to support ``software_statement`` + #: e.g. ``software_statement_alg_values_supported = ['RS256']`` + software_statement_alg_values_supported = None + + def __init__(self, server): + self.server = server + + def __call__(self, request): + return self.create_registration_response(request) + + def create_registration_response(self, request): + token = self.authenticate_token(request) + if not token: + raise AccessDeniedError() + + request.credential = token + + client_metadata = self.extract_client_metadata(request) + client_info = self.generate_client_info() + body = {} + body.update(client_metadata) + body.update(client_info) + client = self.save_client(client_info, client_metadata, request) + registration_info = self.generate_client_registration_info(client, request) + if registration_info: + body.update(registration_info) + return 201, body, default_json_headers + + def extract_client_metadata(self, request): + if not request.data: + raise InvalidRequestError() + + json_data = request.data.copy() + software_statement = json_data.pop('software_statement', None) + if software_statement and self.software_statement_alg_values_supported: + data = self.extract_software_statement(software_statement, request) + json_data.update(data) + + options = self.get_claims_options() + claims = self.claims_class(json_data, {}, options, self.get_server_metadata()) + try: + claims.validate() + except JoseError as error: + raise InvalidClientMetadataError(error.description) + return claims.get_registered_claims() + + def extract_software_statement(self, software_statement, request): + key = self.resolve_public_key(request) + if not key: + raise UnapprovedSoftwareStatementError() + + try: + jwt = JsonWebToken(self.software_statement_alg_values_supported) + claims = jwt.decode(software_statement, key) + # there is no need to validate claims + return claims + except JoseError: + raise 
InvalidSoftwareStatementError() + + def get_claims_options(self): + """Generate claims options validation from Authorization Server metadata.""" + metadata = self.get_server_metadata() + if not metadata: + return {} + + scopes_supported = metadata.get('scopes_supported') + response_types_supported = metadata.get('response_types_supported') + grant_types_supported = metadata.get('grant_types_supported') + auth_methods_supported = metadata.get('token_endpoint_auth_methods_supported') + options = {} + if scopes_supported is not None: + scopes_supported = set(scopes_supported) + + def _validate_scope(claims, value): + if not value: + return True + scopes = set(scope_to_list(value)) + return scopes_supported.issuperset(scopes) + + options['scope'] = {'validate': _validate_scope} + + if response_types_supported is not None: + response_types_supported = set(response_types_supported) + + def _validate_response_types(claims, value): + # If omitted, the default is that the client will use only the "code" + # response type. + response_types = set(value) if value else {"code"} + return response_types_supported.issuperset(response_types) + + options['response_types'] = {'validate': _validate_response_types} + + if grant_types_supported is not None: + grant_types_supported = set(grant_types_supported) + + def _validate_grant_types(claims, value): + # If omitted, the default behavior is that the client will use only + # the "authorization_code" Grant Type. + grant_types = set(value) if value else {"authorization_code"} + return grant_types_supported.issuperset(grant_types) + + options['grant_types'] = {'validate': _validate_grant_types} + + if auth_methods_supported is not None: + options['token_endpoint_auth_method'] = {'values': auth_methods_supported} + + return options + + def generate_client_info(self): + # https://tools.ietf.org/html/rfc7591#section-3.2.1 + client_id = self.generate_client_id() + client_secret = self.generate_client_secret() + client_id_issued_at = int(time.time()) + client_secret_expires_at = 0 + return dict( + client_id=client_id, + client_secret=client_secret, + client_id_issued_at=client_id_issued_at, + client_secret_expires_at=client_secret_expires_at, + ) + + def generate_client_registration_info(self, client, request): + """Generate ```registration_client_uri`` and ``registration_access_token`` + for RFC7592. This method returns ``None`` by default. Developers MAY rewrite + this method to return registration information.""" + return None + + def create_endpoint_request(self, request): + return self.server.create_json_request(request) + + def generate_client_id(self): + """Generate ``client_id`` value. Developers MAY rewrite this method + to use their own way to generate ``client_id``. + """ + return generate_token(42) + + def generate_client_secret(self): + """Generate ``client_secret`` value. Developers MAY rewrite this method + to use their own way to generate ``client_secret``. + """ + return binascii.hexlify(os.urandom(24)).decode('ascii') + + def get_server_metadata(self): + """Return server metadata which includes supported grant types, + response types and etc. + """ + raise NotImplementedError() + + def authenticate_token(self, request): + """Authenticate current credential who is requesting to register a client. 
+ Developers MUST implement this method in subclass:: + + def authenticate_token(self, request): + auth = request.headers.get('Authorization') + return get_token_by_auth(auth) + + :return: token instance + """ + raise NotImplementedError() + + def resolve_public_key(self, request): + """Resolve a public key for decoding ``software_statement``. If + ``enable_software_statement=True``, developers MUST implement this + method in subclass:: + + def resolve_public_key(self, request): + return get_public_key_from_user(request.credential) + + :return: JWK or Key string + """ + raise NotImplementedError() + + def save_client(self, client_info, client_metadata, request): + """Save client into database. Developers MUST implement this method + in subclass:: + + def save_client(self, client_info, client_metadata, request): + client = OAuthClient( + client_id=client_info['client_id'], + client_secret=client_info['client_secret'], + ... + ) + client.save() + return client + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/errors.py new file mode 100644 index 0000000..31693c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7591/errors.py @@ -0,0 +1,33 @@ +from ..rfc6749 import OAuth2Error + + +class InvalidRedirectURIError(OAuth2Error): + """The value of one or more redirection URIs is invalid. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_redirect_uri' + + +class InvalidClientMetadataError(OAuth2Error): + """The value of one of the client metadata fields is invalid and the + server has rejected this request. Note that an authorization + server MAY choose to substitute a valid value for any requested + parameter of a client's metadata. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_client_metadata' + + +class InvalidSoftwareStatementError(OAuth2Error): + """The software statement presented is invalid. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_software_statement' + + +class UnapprovedSoftwareStatementError(OAuth2Error): + """The software statement presented is not approved for use by this + authorization server. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'unapproved_software_statement' diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__init__.py new file mode 100644 index 0000000..6a6457b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__init__.py @@ -0,0 +1,13 @@ +""" + authlib.oauth2.rfc7592 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Dynamic Client Registration Management Protocol. 
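Only the abstract methods above stand between ``ClientRegistrationEndpoint`` and a working registration endpoint. A sketch that reuses the docstring examples, where ``OAuthClient`` and ``get_token_by_auth`` are hypothetical helpers and returning ``None`` from ``get_server_metadata`` skips the metadata-derived claims options::

    from authlib.oauth2.rfc7591 import ClientRegistrationEndpoint

    class MyClientRegistrationEndpoint(ClientRegistrationEndpoint):
        def authenticate_token(self, request):
            auth = request.headers.get('Authorization')
            return get_token_by_auth(auth)

        def get_server_metadata(self):
            return None

        def save_client(self, client_info, client_metadata, request):
            client = OAuthClient(
                client_id=client_info['client_id'],
                client_secret=client_info['client_secret'],
                **client_metadata,
            )
            client.save()
            return client

    authorization_server.register_endpoint(MyClientRegistrationEndpoint)

``resolve_public_key`` only becomes mandatory once ``software_statement_alg_values_supported`` is set, since software statements are ignored otherwise.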
+ + https://tools.ietf.org/html/rfc7592 +""" + +from .endpoint import ClientConfigurationEndpoint + +__all__ = ['ClientConfigurationEndpoint'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f2a931e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/endpoint.cpython-312.pyc new file mode 100644 index 0000000..f837bcf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/__pycache__/endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/endpoint.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/endpoint.py new file mode 100644 index 0000000..25d8b6a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7592/endpoint.py @@ -0,0 +1,262 @@ +from authlib.consts import default_json_headers +from authlib.jose import JoseError +from ..rfc7591.claims import ClientMetadataClaims +from ..rfc6749 import scope_to_list +from ..rfc6749 import AccessDeniedError +from ..rfc6749 import InvalidClientError +from ..rfc6749 import InvalidRequestError +from ..rfc6749 import UnauthorizedClientError +from ..rfc7591 import InvalidClientMetadataError + + +class ClientConfigurationEndpoint: + ENDPOINT_NAME = 'client_configuration' + + #: The claims validation class + claims_class = ClientMetadataClaims + + def __init__(self, server): + self.server = server + + def __call__(self, request): + return self.create_configuration_response(request) + + def create_configuration_response(self, request): + # This request is authenticated by the registration access token issued + # to the client. + token = self.authenticate_token(request) + if not token: + raise AccessDeniedError() + + request.credential = token + + client = self.authenticate_client(request) + if not client: + # If the client does not exist on this server, the server MUST respond + # with HTTP 401 Unauthorized and the registration access token used to + # make this request SHOULD be immediately revoked. + self.revoke_access_token(request, token) + raise InvalidClientError(status_code=401) + + if not self.check_permission(client, request): + # If the client does not have permission to read its record, the server + # MUST return an HTTP 403 Forbidden. 
+ raise UnauthorizedClientError(status_code=403) + + request.client = client + + if request.method == 'GET': + return self.create_read_client_response(client, request) + elif request.method == 'DELETE': + return self.create_delete_client_response(client, request) + elif request.method == 'PUT': + return self.create_update_client_response(client, request) + + def create_endpoint_request(self, request): + return self.server.create_json_request(request) + + def create_read_client_response(self, client, request): + body = self.introspect_client(client) + body.update(self.generate_client_registration_info(client, request)) + return 200, body, default_json_headers + + def create_delete_client_response(self, client, request): + self.delete_client(client, request) + headers = [ + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), + ] + return 204, '', headers + + def create_update_client_response(self, client, request): + # The updated client metadata fields request MUST NOT include the + # 'registration_access_token', 'registration_client_uri', + # 'client_secret_expires_at', or 'client_id_issued_at' fields + must_not_include = ( + 'registration_access_token', + 'registration_client_uri', + 'client_secret_expires_at', + 'client_id_issued_at', + ) + for k in must_not_include: + if k in request.data: + raise InvalidRequestError() + + # The client MUST include its 'client_id' field in the request + client_id = request.data.get('client_id') + if not client_id: + raise InvalidRequestError() + if client_id != client.get_client_id(): + raise InvalidRequestError() + + # If the client includes the 'client_secret' field in the request, + # the value of this field MUST match the currently issued client + # secret for that client. + if 'client_secret' in request.data: + if not client.check_client_secret(request.data['client_secret']): + raise InvalidRequestError() + + client_metadata = self.extract_client_metadata(request) + client = self.update_client(client, client_metadata, request) + return self.create_read_client_response(client, request) + + def extract_client_metadata(self, request): + json_data = request.data.copy() + options = self.get_claims_options() + claims = self.claims_class(json_data, {}, options, self.get_server_metadata()) + + try: + claims.validate() + except JoseError as error: + raise InvalidClientMetadataError(error.description) + return claims.get_registered_claims() + + def get_claims_options(self): + metadata = self.get_server_metadata() + if not metadata: + return {} + + scopes_supported = metadata.get('scopes_supported') + response_types_supported = metadata.get('response_types_supported') + grant_types_supported = metadata.get('grant_types_supported') + auth_methods_supported = metadata.get('token_endpoint_auth_methods_supported') + options = {} + if scopes_supported is not None: + scopes_supported = set(scopes_supported) + + def _validate_scope(claims, value): + if not value: + return True + scopes = set(scope_to_list(value)) + return scopes_supported.issuperset(scopes) + + options['scope'] = {'validate': _validate_scope} + + if response_types_supported is not None: + response_types_supported = set(response_types_supported) + + def _validate_response_types(claims, value): + # If omitted, the default is that the client will use only the "code" + # response type. 
+                response_types = set(value) if value else {"code"}
+                return response_types_supported.issuperset(response_types)
+
+            options['response_types'] = {'validate': _validate_response_types}
+
+        if grant_types_supported is not None:
+            grant_types_supported = set(grant_types_supported)
+
+            def _validate_grant_types(claims, value):
+                # If omitted, the default behavior is that the client will use only
+                # the "authorization_code" Grant Type.
+                grant_types = set(value) if value else {"authorization_code"}
+                return grant_types_supported.issuperset(grant_types)
+
+            options['grant_types'] = {'validate': _validate_grant_types}
+
+        if auth_methods_supported is not None:
+            options['token_endpoint_auth_method'] = {'values': auth_methods_supported}
+
+        return options
+
+    def introspect_client(self, client):
+        return {**client.client_info, **client.client_metadata}
+
+    def generate_client_registration_info(self, client, request):
+        """Generate ``registration_client_uri`` and ``registration_access_token``
+        for RFC7592. Developers MUST implement this method in subclass to
+        return the registration information, e.g.::
+
+            def generate_client_registration_info(self, client, request):
+                access_token = request.headers['Authorization'].split(' ')[1]
+                return {
+                    'registration_client_uri': request.uri,
+                    'registration_access_token': access_token,
+                }
+
+        :param client: the instance of OAuth client
+        :param request: formatted request instance
+        """
+        raise NotImplementedError()
+
+    def authenticate_token(self, request):
+        """Authenticate the current credential that is requesting to manage a client.
+        Developers MUST implement this method in subclass::
+
+            def authenticate_token(self, request):
+                auth = request.headers.get('Authorization')
+                return get_token_by_auth(auth)
+
+        :return: token instance
+        """
+        raise NotImplementedError()
+
+    def authenticate_client(self, request):
+        """Read a client from the request payload.
+        Developers MUST implement this method in subclass::
+
+            def authenticate_client(self, request):
+                client_id = request.data.get('client_id')
+                return Client.get(client_id=client_id)
+
+        :return: client instance
+        """
+        raise NotImplementedError()
+
+    def revoke_access_token(self, request, token):
+        """Revoke the registration access token when an invalid client has been
+        requested. Developers MUST implement this method in subclass::
+
+            def revoke_access_token(self, request, token):
+                token.revoked = True
+                token.save()
+
+        """
+        raise NotImplementedError()
+
+    def check_permission(self, client, request):
+        """Check whether the current client is allowed to be accessed, edited
+        or deleted. Developers MUST implement it in subclass, e.g.::
+
+            def check_permission(self, client, request):
+                return client.editable
+
+        :return: boolean
+        """
+        raise NotImplementedError()
+
+    def delete_client(self, client, request):
+        """Delete the client from the database or cache. Developers MUST
+        implement it in subclass, e.g.::
+
+            def delete_client(self, client, request):
+                client.delete()
+
+        :param client: the instance of OAuth client
+        :param request: formatted request instance
+        """
+        raise NotImplementedError()
+
+    def update_client(self, client, client_metadata, request):
+        """Update the client in the database.
Developers MUST implement this method + in subclass:: + + def update_client(self, client, client_metadata, request): + client.set_client_metadata({**client.client_metadata, **client_metadata}) + client.save() + return client + + :param client: the instance of OAuth client + :param client_metadata: a dict of the client claims to update + :param request: formatted request instance + :return: client instance + """ + + raise NotImplementedError() + + def get_server_metadata(self): + """Return server metadata which includes supported grant types, + response types and etc. + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__init__.py new file mode 100644 index 0000000..c03043b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__init__.py @@ -0,0 +1,13 @@ +""" + authlib.oauth2.rfc7636 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + Proof Key for Code Exchange by OAuth Public Clients. + + https://tools.ietf.org/html/rfc7636 +""" + +from .challenge import CodeChallenge, create_s256_code_challenge + +__all__ = ['CodeChallenge', 'create_s256_code_challenge'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..207a51f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/challenge.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/challenge.cpython-312.pyc new file mode 100644 index 0000000..e52d44c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/__pycache__/challenge.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/challenge.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/challenge.py new file mode 100644 index 0000000..93f3dfc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7636/challenge.py @@ -0,0 +1,148 @@ +import re +import hashlib +from authlib.common.encoding import to_bytes, to_unicode, urlsafe_b64encode +from ..rfc6749 import ( + InvalidRequestError, + InvalidGrantError, + OAuth2Request, +) + + +CODE_VERIFIER_PATTERN = re.compile(r'^[a-zA-Z0-9\-._~]{43,128}$') +CODE_CHALLENGE_PATTERN = re.compile(r'^[a-zA-Z0-9\-._~]{43,128}$') + + +def create_s256_code_challenge(code_verifier): + """Create S256 code_challenge with the given code_verifier.""" + data = hashlib.sha256(to_bytes(code_verifier, 'ascii')).digest() + return to_unicode(urlsafe_b64encode(data)) + + +def compare_plain_code_challenge(code_verifier, code_challenge): + # If the "code_challenge_method" from Section 4.3 was "plain", + # they are compared directly + return code_verifier == code_challenge + + +def compare_s256_code_challenge(code_verifier, code_challenge): + # BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) == code_challenge + return create_s256_code_challenge(code_verifier) == code_challenge + + +class CodeChallenge: + """CodeChallenge extension to Authorization Code Grant. 
It is used to + improve the security of Authorization Code flow for public clients by + sending extra "code_challenge" and "code_verifier" to the authorization + server. + + The AuthorizationCodeGrant SHOULD save the ``code_challenge`` and + ``code_challenge_method`` into database when ``save_authorization_code``. + Then register this extension via:: + + server.register_grant( + AuthorizationCodeGrant, + [CodeChallenge(required=True)] + ) + """ + #: defaults to "plain" if not present in the request + DEFAULT_CODE_CHALLENGE_METHOD = 'plain' + #: supported ``code_challenge_method`` + SUPPORTED_CODE_CHALLENGE_METHOD = ['plain', 'S256'] + + CODE_CHALLENGE_METHODS = { + 'plain': compare_plain_code_challenge, + 'S256': compare_s256_code_challenge, + } + + def __init__(self, required=True): + self.required = required + + def __call__(self, grant): + grant.register_hook( + 'after_validate_authorization_request', + self.validate_code_challenge, + ) + grant.register_hook( + 'after_validate_token_request', + self.validate_code_verifier, + ) + + def validate_code_challenge(self, grant): + request: OAuth2Request = grant.request + challenge = request.data.get('code_challenge') + method = request.data.get('code_challenge_method') + if not challenge and not method: + return + + if not challenge: + raise InvalidRequestError('Missing "code_challenge"') + + if len(request.datalist.get('code_challenge', [])) > 1: + raise InvalidRequestError('Multiple "code_challenge" in request.') + + if not CODE_CHALLENGE_PATTERN.match(challenge): + raise InvalidRequestError('Invalid "code_challenge"') + + if method and method not in self.SUPPORTED_CODE_CHALLENGE_METHOD: + raise InvalidRequestError('Unsupported "code_challenge_method"') + + if len(request.datalist.get('code_challenge_method', [])) > 1: + raise InvalidRequestError('Multiple "code_challenge_method" in request.') + + def validate_code_verifier(self, grant): + request: OAuth2Request = grant.request + verifier = request.form.get('code_verifier') + + # public client MUST verify code challenge + if self.required and request.auth_method == 'none' and not verifier: + raise InvalidRequestError('Missing "code_verifier"') + + authorization_code = request.authorization_code + challenge = self.get_authorization_code_challenge(authorization_code) + + # ignore, it is the normal RFC6749 authorization_code request + if not challenge and not verifier: + return + + # challenge exists, code_verifier is required + if not verifier: + raise InvalidRequestError('Missing "code_verifier"') + + if not CODE_VERIFIER_PATTERN.match(verifier): + raise InvalidRequestError('Invalid "code_verifier"') + + # 4.6. Server Verifies code_verifier before Returning the Tokens + method = self.get_authorization_code_challenge_method(authorization_code) + if method is None: + method = self.DEFAULT_CODE_CHALLENGE_METHOD + + func = self.CODE_CHALLENGE_METHODS.get(method) + if not func: + raise RuntimeError(f'No verify method for "{method}"') + + # If the values are not equal, an error response indicating + # "invalid_grant" MUST be returned. + if not func(verifier, challenge): + raise InvalidGrantError(description='Code challenge failed.') + + def get_authorization_code_challenge(self, authorization_code): + """Get "code_challenge" associated with this authorization code. 
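The client half of this exchange needs only two calls, both against helpers that already appear in this patch. A sketch, using the same ``generate_token`` helper that ``assertion.py`` imports::

    from authlib.common.security import generate_token
    from authlib.oauth2.rfc7636 import create_s256_code_challenge

    # 48 characters satisfies the 43-128 length rule in CODE_VERIFIER_PATTERN
    code_verifier = generate_token(48)
    code_challenge = create_s256_code_challenge(code_verifier)

``code_challenge`` (plus ``code_challenge_method=S256``) goes on the authorization request; ``code_verifier`` goes on the later token request, where ``validate_code_verifier`` recomputes the challenge and compares.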
+ Developers MAY re-implement it in subclass, the default logic:: + + def get_authorization_code_challenge(self, authorization_code): + return authorization_code.code_challenge + + :param authorization_code: the instance of authorization_code + """ + return authorization_code.code_challenge + + def get_authorization_code_challenge_method(self, authorization_code): + """Get "code_challenge_method" associated with this authorization code. + Developers MAY re-implement it in subclass, the default logic:: + + def get_authorization_code_challenge_method(self, authorization_code): + return authorization_code.code_challenge_method + + :param authorization_code: the instance of authorization_code + """ + return authorization_code.code_challenge_method diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__init__.py new file mode 100644 index 0000000..045aeda --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__init__.py @@ -0,0 +1,15 @@ +""" + authlib.oauth2.rfc7662 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Token Introspection. + + https://tools.ietf.org/html/rfc7662 +""" + +from .introspection import IntrospectionEndpoint +from .models import IntrospectionToken +from .token_validator import IntrospectTokenValidator + +__all__ = ['IntrospectionEndpoint', 'IntrospectionToken', 'IntrospectTokenValidator'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9815c1e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/introspection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/introspection.cpython-312.pyc new file mode 100644 index 0000000..51dded4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/introspection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..e99075e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/token_validator.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/token_validator.cpython-312.pyc new file mode 100644 index 0000000..bac34a7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/__pycache__/token_validator.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/introspection.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/introspection.py new file mode 100644 index 0000000..515d6ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/introspection.py @@ -0,0 +1,131 @@ +from authlib.consts import default_json_headers +from ..rfc6749 import ( + TokenEndpoint, + InvalidRequestError, + UnsupportedTokenTypeError, +) + + +class IntrospectionEndpoint(TokenEndpoint): + 
"""Implementation of introspection endpoint which is described in + `RFC7662`_. + + .. _RFC7662: https://tools.ietf.org/html/rfc7662 + """ + #: Endpoint name to be registered + ENDPOINT_NAME = 'introspection' + + def authenticate_token(self, request, client): + """The protected resource calls the introspection endpoint using an HTTP + ``POST`` request with parameters sent as + "application/x-www-form-urlencoded" data. The protected resource sends a + parameter representing the token along with optional parameters + representing additional context that is known by the protected resource + to aid the authorization server in its response. + + token + **REQUIRED** The string value of the token. For access tokens, this + is the ``access_token`` value returned from the token endpoint + defined in OAuth 2.0. For refresh tokens, this is the + ``refresh_token`` value returned from the token endpoint as defined + in OAuth 2.0. + + token_type_hint + **OPTIONAL** A hint about the type of the token submitted for + introspection. + """ + + self.check_params(request, client) + token = self.query_token(request.form['token'], request.form.get('token_type_hint')) + if token and self.check_permission(token, client, request): + return token + + def check_params(self, request, client): + params = request.form + if 'token' not in params: + raise InvalidRequestError() + + hint = params.get('token_type_hint') + if hint and hint not in self.SUPPORTED_TOKEN_TYPES: + raise UnsupportedTokenTypeError() + + def create_endpoint_response(self, request): + """Validate introspection request and create the response. + + :returns: (status_code, body, headers) + """ + # The authorization server first validates the client credentials + client = self.authenticate_endpoint_client(request) + + # then verifies whether the token was issued to the client making + # the revocation request + token = self.authenticate_token(request, client) + + # the authorization server invalidates the token + body = self.create_introspection_payload(token) + return 200, body, default_json_headers + + def create_introspection_payload(self, token): + # the token is not active, does not exist on this server, or the + # protected resource is not allowed to introspect this particular + # token, then the authorization server MUST return an introspection + # response with the "active" field set to "false" + if not token: + return {'active': False} + if token.is_expired() or token.is_revoked(): + return {'active': False} + payload = self.introspect_token(token) + if 'active' not in payload: + payload['active'] = True + return payload + + def check_permission(self, token, client, request): + """Check if the request has permission to introspect the token. Developers + MUST implement this method:: + + def check_permission(self, token, client, request): + # only allow a special client to introspect the token + return client.client_id == 'introspection_client' + + :return: bool + """ + raise NotImplementedError() + + def query_token(self, token_string, token_type_hint): + """Get the token from database/storage by the given token string. 
+ Developers should implement this method:: + + def query_token(self, token_string, token_type_hint): + if token_type_hint == 'access_token': + tok = Token.query_by_access_token(token_string) + elif token_type_hint == 'refresh_token': + tok = Token.query_by_refresh_token(token_string) + else: + tok = Token.query_by_access_token(token_string) + if not tok: + tok = Token.query_by_refresh_token(token_string) + return tok + """ + raise NotImplementedError() + + def introspect_token(self, token): + """Read given token and return its introspection metadata as a + dictionary following `Section 2.2`_:: + + def introspect_token(self, token): + return { + 'active': True, + 'client_id': token.client_id, + 'token_type': token.token_type, + 'username': get_token_username(token), + 'scope': token.get_scope(), + 'sub': get_token_user_sub(token), + 'aud': token.client_id, + 'iss': 'https://server.example.com/', + 'exp': token.expires_at, + 'iat': token.issued_at, + } + + .. _`Section 2.2`: https://tools.ietf.org/html/rfc7662#section-2.2 + """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/models.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/models.py new file mode 100644 index 0000000..0f4f0c2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/models.py @@ -0,0 +1,30 @@ +from ..rfc6749 import TokenMixin + + +class IntrospectionToken(dict, TokenMixin): + def get_client_id(self): + return self.get('client_id') + + def get_scope(self): + return self.get('scope') + + def get_expires_in(self): + # this method is only used in refresh token, + # no need to implement it + return 0 + + def get_expires_at(self): + return self.get('exp', 0) + + def __getattr__(self, key): + # https://tools.ietf.org/html/rfc7662#section-2.2 + available_keys = { + 'active', 'scope', 'client_id', 'username', 'token_type', + 'exp', 'iat', 'nbf', 'sub', 'aud', 'iss', 'jti' + } + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in available_keys: + return self.get(key) + raise error diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/token_validator.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/token_validator.py new file mode 100644 index 0000000..882c8d9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc7662/token_validator.py @@ -0,0 +1,34 @@ +from ..rfc6749 import TokenValidator +from ..rfc6750 import ( + InvalidTokenError, + InsufficientScopeError +) + + +class IntrospectTokenValidator(TokenValidator): + TOKEN_TYPE = 'bearer' + + def introspect_token(self, token_string): + """Request introspection token endpoint with the given token string, + authorization server will return token information in JSON format. + Developers MUST implement this method before using it:: + + def introspect_token(self, token_string): + # for example, introspection token endpoint has limited + # internal IPs to access, so there is no need to add + # authentication. 
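+                # (if your endpoint does require client credentials -- an
+                # assumption, not something this example mandates -- passing
+                # ``auth=(client_id, client_secret)`` to ``requests.post``
+                # below is one way to supply them)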
+ url = 'https://example.com/oauth/introspect' + resp = requests.post(url, data={'token': token_string}) + resp.raise_for_status() + return resp.json() + """ + raise NotImplementedError() + + def authenticate_token(self, token_string): + return self.introspect_token(token_string) + + def validate_token(self, token, scopes, request): + if not token or not token['active']: + raise InvalidTokenError(realm=self.realm, extra_attributes=self.extra_attributes) + if self.scope_insufficient(token.get('scope'), scopes): + raise InsufficientScopeError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__init__.py new file mode 100644 index 0000000..b1b151c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__init__.py @@ -0,0 +1,15 @@ +""" + authlib.oauth2.rfc8414 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Authorization Server Metadata. + + https://tools.ietf.org/html/rfc8414 +""" + +from .models import AuthorizationServerMetadata +from .well_known import get_well_known_url + + +__all__ = ['AuthorizationServerMetadata', 'get_well_known_url'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ba2b199 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..771a83e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/well_known.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/well_known.cpython-312.pyc new file mode 100644 index 0000000..9421dd5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/__pycache__/well_known.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/models.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/models.py new file mode 100644 index 0000000..2dc790b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/models.py @@ -0,0 +1,368 @@ +from authlib.common.urls import urlparse, is_valid_url +from authlib.common.security import is_secure_transport + + +class AuthorizationServerMetadata(dict): + """Define Authorization Server Metadata via `Section 2`_ in RFC8414_. + + .. _RFC8414: https://tools.ietf.org/html/rfc8414 + .. 
_`Section 2`: https://tools.ietf.org/html/rfc8414#section-2 + """ + REGISTRY_KEYS = [ + 'issuer', 'authorization_endpoint', 'token_endpoint', + 'jwks_uri', 'registration_endpoint', 'scopes_supported', + 'response_types_supported', 'response_modes_supported', + 'grant_types_supported', 'token_endpoint_auth_methods_supported', + 'token_endpoint_auth_signing_alg_values_supported', + 'service_documentation', 'ui_locales_supported', + 'op_policy_uri', 'op_tos_uri', 'revocation_endpoint', + 'revocation_endpoint_auth_methods_supported', + 'revocation_endpoint_auth_signing_alg_values_supported', + 'introspection_endpoint', + 'introspection_endpoint_auth_methods_supported', + 'introspection_endpoint_auth_signing_alg_values_supported', + 'code_challenge_methods_supported', + ] + + def validate_issuer(self): + """REQUIRED. The authorization server's issuer identifier, which is + a URL that uses the "https" scheme and has no query or fragment + components. + """ + issuer = self.get('issuer') + + #: 1. REQUIRED + if not issuer: + raise ValueError('"issuer" is required') + + parsed = urlparse.urlparse(issuer) + + #: 2. uses the "https" scheme + if not is_secure_transport(issuer): + raise ValueError('"issuer" MUST use "https" scheme') + + #: 3. has no query or fragment + if parsed.query or parsed.fragment: + raise ValueError('"issuer" has no query or fragment') + + def validate_authorization_endpoint(self): + """URL of the authorization server's authorization endpoint + [RFC6749]. This is REQUIRED unless no grant types are supported + that use the authorization endpoint. + """ + url = self.get('authorization_endpoint') + if url: + if not is_secure_transport(url): + raise ValueError( + '"authorization_endpoint" MUST use "https" scheme') + return + + grant_types_supported = set(self.grant_types_supported) + authorization_grant_types = {'authorization_code', 'implicit'} + if grant_types_supported & authorization_grant_types: + raise ValueError('"authorization_endpoint" is required') + + def validate_token_endpoint(self): + """URL of the authorization server's token endpoint [RFC6749]. This + is REQUIRED unless only the implicit grant type is supported. + """ + grant_types_supported = self.get('grant_types_supported') + if grant_types_supported and len(grant_types_supported) == 1 and \ + grant_types_supported[0] == 'implicit': + return + + url = self.get('token_endpoint') + if not url: + raise ValueError('"token_endpoint" is required') + + if not is_secure_transport(url): + raise ValueError('"token_endpoint" MUST use "https" scheme') + + def validate_jwks_uri(self): + """OPTIONAL. URL of the authorization server's JWK Set [JWK] + document. The referenced document contains the signing key(s) the + client uses to validate signatures from the authorization server. + This URL MUST use the "https" scheme. The JWK Set MAY also + contain the server's encryption key or keys, which are used by + clients to encrypt requests to the server. When both signing and + encryption keys are made available, a "use" (public key use) + parameter value is REQUIRED for all keys in the referenced JWK Set + to indicate each key's intended usage. + """ + url = self.get('jwks_uri') + if url and not is_secure_transport(url): + raise ValueError('"jwks_uri" MUST use "https" scheme') + + def validate_registration_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 Dynamic + Client Registration endpoint [RFC7591]. 
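+
+        When present, the URL is checked below to use the "https" scheme, in
+        line with the other endpoint URLs in this metadata.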
+ """ + url = self.get('registration_endpoint') + if url and not is_secure_transport(url): + raise ValueError( + '"registration_endpoint" MUST use "https" scheme') + + def validate_scopes_supported(self): + """RECOMMENDED. JSON array containing a list of the OAuth 2.0 + [RFC6749] "scope" values that this authorization server supports. + Servers MAY choose not to advertise some supported scope values + even when this parameter is used. + """ + validate_array_value(self, 'scopes_supported') + + def validate_response_types_supported(self): + """REQUIRED. JSON array containing a list of the OAuth 2.0 + "response_type" values that this authorization server supports. + The array values used are the same as those used with the + "response_types" parameter defined by "OAuth 2.0 Dynamic Client + Registration Protocol" [RFC7591]. + """ + response_types_supported = self.get('response_types_supported') + if not response_types_supported: + raise ValueError('"response_types_supported" is required') + if not isinstance(response_types_supported, list): + raise ValueError('"response_types_supported" MUST be JSON array') + + def validate_response_modes_supported(self): + """OPTIONAL. JSON array containing a list of the OAuth 2.0 + "response_mode" values that this authorization server supports, as + specified in "OAuth 2.0 Multiple Response Type Encoding Practices" + [OAuth.Responses]. If omitted, the default is "["query", + "fragment"]". The response mode value "form_post" is also defined + in "OAuth 2.0 Form Post Response Mode" [OAuth.Post]. + """ + validate_array_value(self, 'response_modes_supported') + + def validate_grant_types_supported(self): + """OPTIONAL. JSON array containing a list of the OAuth 2.0 grant + type values that this authorization server supports. The array + values used are the same as those used with the "grant_types" + parameter defined by "OAuth 2.0 Dynamic Client Registration + Protocol" [RFC7591]. If omitted, the default value is + "["authorization_code", "implicit"]". + """ + validate_array_value(self, 'grant_types_supported') + + def validate_token_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this token endpoint. Client authentication + method values are used in the "token_endpoint_auth_method" + parameter defined in Section 2 of [RFC7591]. If omitted, the + default is "client_secret_basic" -- the HTTP Basic Authentication + Scheme specified in Section 2.3.1 of OAuth 2.0 [RFC6749]. + """ + validate_array_value(self, 'token_endpoint_auth_methods_supported') + + def validate_token_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the token endpoint for the + signature on the JWT [JWT] used to authenticate the client at the + token endpoint for the "private_key_jwt" and "client_secret_jwt" + authentication methods. This metadata entry MUST be present if + either of these authentication methods are specified in the + "token_endpoint_auth_methods_supported" entry. No default + algorithms are implied if this entry is omitted. Servers SHOULD + support "RS256". The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'token_endpoint_auth_signing_alg_values_supported', + self.token_endpoint_auth_methods_supported + ) + + def validate_service_documentation(self): + """OPTIONAL. 
URL of a page containing human-readable information + that developers might want or need to know when using the + authorization server. In particular, if the authorization server + does not support Dynamic Client Registration, then information on + how to register clients needs to be provided in this + documentation. + """ + value = self.get('service_documentation') + if value and not is_valid_url(value): + raise ValueError('"service_documentation" MUST be a URL') + + def validate_ui_locales_supported(self): + """OPTIONAL. Languages and scripts supported for the user interface, + represented as a JSON array of language tag values from BCP 47 + [RFC5646]. If omitted, the set of supported languages and scripts + is unspecified. + """ + validate_array_value(self, 'ui_locales_supported') + + def validate_op_policy_uri(self): + """OPTIONAL. URL that the authorization server provides to the + person registering the client to read about the authorization + server's requirements on how the client can use the data provided + by the authorization server. The registration process SHOULD + display this URL to the person registering the client if it is + given. As described in Section 5, despite the identifier + "op_policy_uri" appearing to be OpenID-specific, its usage in this + specification is actually referring to a general OAuth 2.0 feature + that is not specific to OpenID Connect. + """ + value = self.get('op_policy_uri') + if value and not is_valid_url(value): + raise ValueError('"op_policy_uri" MUST be a URL') + + def validate_op_tos_uri(self): + """OPTIONAL. URL that the authorization server provides to the + person registering the client to read about the authorization + server's terms of service. The registration process SHOULD + display this URL to the person registering the client if it is + given. As described in Section 5, despite the identifier + "op_tos_uri", appearing to be OpenID-specific, its usage in this + specification is actually referring to a general OAuth 2.0 feature + that is not specific to OpenID Connect. + """ + value = self.get('op_tos_uri') + if value and not is_valid_url(value): + raise ValueError('"op_tos_uri" MUST be a URL') + + def validate_revocation_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 revocation + endpoint [RFC7009].""" + url = self.get('revocation_endpoint') + if url and not is_secure_transport(url): + raise ValueError('"revocation_endpoint" MUST use "https" scheme') + + def validate_revocation_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this revocation endpoint. The valid client + authentication method values are those registered in the IANA + "OAuth Token Endpoint Authentication Methods" registry + [IANA.OAuth.Parameters]. If omitted, the default is + "client_secret_basic" -- the HTTP Basic Authentication Scheme + specified in Section 2.3.1 of OAuth 2.0 [RFC6749]. + """ + validate_array_value(self, 'revocation_endpoint_auth_methods_supported') + + def validate_revocation_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the revocation endpoint for + the signature on the JWT [JWT] used to authenticate the client at + the revocation endpoint for the "private_key_jwt" and + "client_secret_jwt" authentication methods. 
This metadata entry + MUST be present if either of these authentication methods are + specified in the "revocation_endpoint_auth_methods_supported" + entry. No default algorithms are implied if this entry is + omitted. The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'revocation_endpoint_auth_signing_alg_values_supported', + self.revocation_endpoint_auth_methods_supported + ) + + def validate_introspection_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 + introspection endpoint [RFC7662]. + """ + url = self.get('introspection_endpoint') + if url and not is_secure_transport(url): + raise ValueError( + '"introspection_endpoint" MUST use "https" scheme') + + def validate_introspection_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this introspection endpoint. The valid + client authentication method values are those registered in the + IANA "OAuth Token Endpoint Authentication Methods" registry + [IANA.OAuth.Parameters] or those registered in the IANA "OAuth + Access Token Types" registry [IANA.OAuth.Parameters]. (These + values are and will remain distinct, due to Section 7.2.) If + omitted, the set of supported authentication methods MUST be + determined by other means. + """ + validate_array_value(self, 'introspection_endpoint_auth_methods_supported') + + def validate_introspection_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the introspection endpoint + for the signature on the JWT [JWT] used to authenticate the client + at the introspection endpoint for the "private_key_jwt" and + "client_secret_jwt" authentication methods. This metadata entry + MUST be present if either of these authentication methods are + specified in the "introspection_endpoint_auth_methods_supported" + entry. No default algorithms are implied if this entry is + omitted. The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'introspection_endpoint_auth_signing_alg_values_supported', + self.introspection_endpoint_auth_methods_supported + ) + + def validate_code_challenge_methods_supported(self): + """OPTIONAL. JSON array containing a list of Proof Key for Code + Exchange (PKCE) [RFC7636] code challenge methods supported by this + authorization server. Code challenge method values are used in + the "code_challenge_method" parameter defined in Section 4.3 of + [RFC7636]. The valid code challenge method values are those + registered in the IANA "PKCE Code Challenge Methods" registry + [IANA.OAuth.Parameters]. If omitted, the authorization server + does not support PKCE. 
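+
+        A typical value is ``["plain", "S256"]``, the two methods that
+        [RFC7636] itself registers.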
+ """ + validate_array_value(self, 'code_challenge_methods_supported') + + @property + def response_modes_supported(self): + #: If omitted, the default is ["query", "fragment"] + return self.get('response_modes_supported', ["query", "fragment"]) + + @property + def grant_types_supported(self): + #: If omitted, the default value is ["authorization_code", "implicit"] + return self.get('grant_types_supported', ["authorization_code", "implicit"]) + + @property + def token_endpoint_auth_methods_supported(self): + #: If omitted, the default is "client_secret_basic" + return self.get('token_endpoint_auth_methods_supported', ["client_secret_basic"]) + + @property + def revocation_endpoint_auth_methods_supported(self): + #: If omitted, the default is "client_secret_basic" + return self.get('revocation_endpoint_auth_methods_supported', ["client_secret_basic"]) + + @property + def introspection_endpoint_auth_methods_supported(self): + #: If omitted, the set of supported authentication methods MUST be + #: determined by other means + #: here, we use "client_secret_basic" + return self.get('introspection_endpoint_auth_methods_supported', ["client_secret_basic"]) + + def validate(self): + """Validate all server metadata value.""" + for key in self.REGISTRY_KEYS: + object.__getattribute__(self, f'validate_{key}')() + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTRY_KEYS: + return self.get(key) + raise error + + +def _validate_alg_values(data, key, auth_methods_supported): + value = data.get(key) + if value and not isinstance(value, list): + raise ValueError(f'"{key}" MUST be JSON array') + + auth_methods = set(auth_methods_supported) + jwt_auth_methods = {'private_key_jwt', 'client_secret_jwt'} + if auth_methods & jwt_auth_methods: + if not value: + raise ValueError(f'"{key}" is required') + + if value and 'none' in value: + raise ValueError( + f'the value "none" MUST NOT be used in "{key}"') + + +def validate_array_value(metadata, key): + values = metadata.get(key) + if values is not None and not isinstance(values, list): + raise ValueError(f'"{key}" MUST be JSON array') diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/well_known.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/well_known.py new file mode 100644 index 0000000..42d70b3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8414/well_known.py @@ -0,0 +1,22 @@ +from authlib.common.urls import urlparse + + +def get_well_known_url(issuer, external=False, suffix='oauth-authorization-server'): + """Get well-known URI with issuer via `Section 3.1`_. + + .. 
_`Section 3.1`: https://tools.ietf.org/html/rfc8414#section-3.1 + + :param issuer: URL of the issuer + :param external: return full external url or not + :param suffix: well-known URI suffix for RFC8414 + :return: URL + """ + parsed = urlparse.urlparse(issuer) + path = parsed.path + if path and path != '/': + url_path = f'/.well-known/{suffix}{path}' + else: + url_path = f'/.well-known/{suffix}' + if not external: + return url_path + return parsed.scheme + '://' + parsed.netloc + url_path diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__init__.py new file mode 100644 index 0000000..6ad59fd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__init__.py @@ -0,0 +1,22 @@ +""" + authlib.oauth2.rfc8628 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents an implementation of + OAuth 2.0 Device Authorization Grant. + + https://tools.ietf.org/html/rfc8628 +""" + +from .endpoint import DeviceAuthorizationEndpoint +from .device_code import DeviceCodeGrant, DEVICE_CODE_GRANT_TYPE +from .models import DeviceCredentialMixin, DeviceCredentialDict +from .errors import AuthorizationPendingError, SlowDownError, ExpiredTokenError + + +__all__ = [ + 'DeviceAuthorizationEndpoint', + 'DeviceCodeGrant', 'DEVICE_CODE_GRANT_TYPE', + 'DeviceCredentialMixin', 'DeviceCredentialDict', + 'AuthorizationPendingError', 'SlowDownError', 'ExpiredTokenError', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a38856a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/device_code.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/device_code.cpython-312.pyc new file mode 100644 index 0000000..c1e5baa Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/device_code.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/endpoint.cpython-312.pyc new file mode 100644 index 0000000..0549312 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..57e2309 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..82bf816 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/device_code.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/device_code.py new file mode 100644 
index 0000000..6820917 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/device_code.py @@ -0,0 +1,183 @@ +import logging +from ..rfc6749.errors import ( + InvalidRequestError, + UnauthorizedClientError, + AccessDeniedError, +) +from ..rfc6749 import BaseGrant, TokenEndpointMixin +from .errors import ( + AuthorizationPendingError, + ExpiredTokenError, + SlowDownError, +) + +log = logging.getLogger(__name__) +DEVICE_CODE_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:device_code' + + +class DeviceCodeGrant(BaseGrant, TokenEndpointMixin): + """This OAuth 2.0 [RFC6749] protocol extension enables OAuth clients to + request user authorization from applications on devices that have + limited input capabilities or lack a suitable browser. Such devices + include smart TVs, media consoles, picture frames, and printers, + which lack an easy input method or a suitable browser required for + traditional OAuth interactions. Here is the authorization flow:: + + +----------+ +----------------+ + | |>---(A)-- Client Identifier --->| | + | | | | + | |<---(B)-- Device Code, ---<| | + | | User Code, | | + | Device | & Verification URI | | + | Client | | | + | | [polling] | | + | |>---(E)-- Device Code --->| | + | | & Client Identifier | | + | | | Authorization | + | |<---(F)-- Access Token ---<| Server | + +----------+ (& Optional Refresh Token) | | + v | | + : | | + (C) User Code & Verification URI | | + : | | + v | | + +----------+ | | + | End User | | | + | at |<---(D)-- End user reviews --->| | + | Browser | authorization request | | + +----------+ +----------------+ + + This DeviceCodeGrant is the implementation of step (E) and (F). + + (E) While the end user reviews the client's request (step D), the + client repeatedly polls the authorization server to find out if + the user completed the user authorization step. The client + includes the device code and its client identifier. + + (F) The authorization server validates the device code provided by + the client and responds with the access token if the client is + granted access, an error if they are denied access, or an + indication that the client should continue to poll. + """ + GRANT_TYPE = DEVICE_CODE_GRANT_TYPE + TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic', 'client_secret_post', 'none'] + + def validate_token_request(self): + """After displaying instructions to the user, the client creates an + access token request and sends it to the token endpoint with the + following parameters: + + grant_type + REQUIRED. Value MUST be set to + "urn:ietf:params:oauth:grant-type:device_code". + + device_code + REQUIRED. The device verification code, "device_code" from the + device authorization response. + + client_id + REQUIRED if the client is not authenticating with the + authorization server as described in Section 3.2.1. of [RFC6749]. + The client identifier as described in Section 2.2 of [RFC6749]. 
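+
+        (When the client authenticates with ``client_secret_basic``, its
+        identifier is carried in the ``Authorization`` header rather than in
+        the form body.)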
+
+        For example, the client makes the following HTTPS request::
+
+            POST /token HTTP/1.1
+            Host: server.example.com
+            Content-Type: application/x-www-form-urlencoded
+
+            grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code
+            &device_code=GmRhmhcxhwAzkoEqiMEg_DnyEysNkuNhszIySk9eS
+            &client_id=1406020730
+        """
+        device_code = self.request.data.get('device_code')
+        if not device_code:
+            raise InvalidRequestError('Missing "device_code" in payload')
+
+        client = self.authenticate_token_endpoint_client()
+        if not client.check_grant_type(self.GRANT_TYPE):
+            raise UnauthorizedClientError()
+
+        credential = self.query_device_credential(device_code)
+        if not credential:
+            raise InvalidRequestError('Invalid "device_code" in payload')
+
+        if credential.get_client_id() != client.get_client_id():
+            raise UnauthorizedClientError()
+
+        user = self.validate_device_credential(credential)
+        self.request.user = user
+        self.request.client = client
+        self.request.credential = credential
+
+    def create_token_response(self):
+        """If the access token request is valid and authorized, the
+        authorization server issues an access token and optional refresh
+        token.
+        """
+        client = self.request.client
+        scope = self.request.credential.get_scope()
+        token = self.generate_token(
+            user=self.request.user,
+            scope=scope,
+            include_refresh_token=client.check_grant_type('refresh_token'),
+        )
+        log.debug('Issue token %r to %r', token, client)
+        self.save_token(token)
+        self.execute_hook('process_token', token=token)
+        return 200, token, self.TOKEN_RESPONSE_HEADER
+
+    def validate_device_credential(self, credential):
+        if credential.is_expired():
+            raise ExpiredTokenError()
+
+        user_code = credential.get_user_code()
+        user_grant = self.query_user_grant(user_code)
+
+        if user_grant is not None:
+            user, approved = user_grant
+            if not approved:
+                raise AccessDeniedError()
+            return user
+
+        if self.should_slow_down(credential):
+            raise SlowDownError()
+
+        raise AuthorizationPendingError()
+
+    def query_device_credential(self, device_code):
+        """Get the device credential previously saved via
+        ``DeviceAuthorizationEndpoint``. Developers MUST implement it in
+        subclass::
+
+            def query_device_credential(self, device_code):
+                return DeviceCredential.get(device_code)
+
+        :param device_code: a string representing the code.
+        :return: DeviceCredential instance
+        """
+        raise NotImplementedError()
+
+    def query_user_grant(self, user_code):
+        """Get the user and grant via the given user code. Developers MUST
+        implement it in subclass::
+
+            def query_user_grant(self, user_code):
+                # e.g. we saved user grant info in redis
+                data = redis.get('oauth_user_grant:' + user_code)
+                if not data:
+                    return None
+
+                user_id, allowed = data.split()
+                user = User.get(user_id)
+                return user, bool(allowed)
+
+        Note: the user grant information is saved by the verification endpoint.
+        """
+        raise NotImplementedError()
+
+    def should_slow_down(self, credential):
+        """The authorization request is still pending and polling should
+        continue, but the interval MUST be increased by 5 seconds for this
+        and all subsequent requests.
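+
+        A minimal sketch, assuming the credential records a mutable
+        ``last_poll_at`` timestamp and an ``interval`` field -- both are
+        assumptions, neither is part of ``DeviceCredentialMixin``::
+
+            import time
+
+            def should_slow_down(self, credential):
+                now = time.time()
+                last = credential.get('last_poll_at', 0)
+                credential['last_poll_at'] = now
+                # polled faster than the advertised interval: slow down
+                return now - last < credential.get('interval', 5)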
+ """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/endpoint.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/endpoint.py new file mode 100644 index 0000000..49221f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/endpoint.py @@ -0,0 +1,170 @@ +from authlib.consts import default_json_headers +from authlib.common.security import generate_token +from authlib.common.urls import add_params_to_uri + + +class DeviceAuthorizationEndpoint: + """This OAuth 2.0 [RFC6749] protocol extension enables OAuth clients to + request user authorization from applications on devices that have + limited input capabilities or lack a suitable browser. Such devices + include smart TVs, media consoles, picture frames, and printers, + which lack an easy input method or a suitable browser required for + traditional OAuth interactions. Here is the authorization flow:: + + +----------+ +----------------+ + | |>---(A)-- Client Identifier --->| | + | | | | + | |<---(B)-- Device Code, ---<| | + | | User Code, | | + | Device | & Verification URI | | + | Client | | | + | | [polling] | | + | |>---(E)-- Device Code --->| | + | | & Client Identifier | | + | | | Authorization | + | |<---(F)-- Access Token ---<| Server | + +----------+ (& Optional Refresh Token) | | + v | | + : | | + (C) User Code & Verification URI | | + : | | + v | | + +----------+ | | + | End User | | | + | at |<---(D)-- End user reviews --->| | + | Browser | authorization request | | + +----------+ +----------------+ + + This DeviceAuthorizationEndpoint is the implementation of step (A) and (B). + + (A) The client requests access from the authorization server and + includes its client identifier in the request. + + (B) The authorization server issues a device code and an end-user + code and provides the end-user verification URI. + """ + + ENDPOINT_NAME = 'device_authorization' + CLIENT_AUTH_METHODS = ['client_secret_basic', 'client_secret_post', 'none'] + + #: customize "user_code" type, string or digital + USER_CODE_TYPE = 'string' + + #: The lifetime in seconds of the "device_code" and "user_code" + EXPIRES_IN = 1800 + + #: The minimum amount of time in seconds that the client SHOULD + #: wait between polling requests to the token endpoint. + INTERVAL = 5 + + def __init__(self, server): + self.server = server + + def __call__(self, request): + # make it callable for authorization server + # ``create_endpoint_response`` + return self.create_endpoint_response(request) + + def create_endpoint_request(self, request): + return self.server.create_oauth2_request(request) + + def authenticate_client(self, request): + """client_id is REQUIRED **if the client is not** authenticating with the + authorization server as described in Section 3.2.1. of [RFC6749]. + + This means the endpoint support "none" authentication method. In this case, + this endpoint's auth methods are: + + - client_secret_basic + - client_secret_post + - none + + Developers change the value of ``CLIENT_AUTH_METHODS`` in subclass. 
For + instance:: + + class MyDeviceAuthorizationEndpoint(DeviceAuthorizationEndpoint): + # only support ``client_secret_basic`` auth method + CLIENT_AUTH_METHODS = ['client_secret_basic'] + """ + client = self.server.authenticate_client( + request, self.CLIENT_AUTH_METHODS, self.ENDPOINT_NAME) + request.client = client + return client + + def create_endpoint_response(self, request): + # https://tools.ietf.org/html/rfc8628#section-3.1 + + self.authenticate_client(request) + self.server.validate_requested_scope(request.scope) + + device_code = self.generate_device_code() + user_code = self.generate_user_code() + verification_uri = self.get_verification_uri() + verification_uri_complete = add_params_to_uri( + verification_uri, [('user_code', user_code)]) + + data = { + 'device_code': device_code, + 'user_code': user_code, + 'verification_uri': verification_uri, + 'verification_uri_complete': verification_uri_complete, + 'expires_in': self.EXPIRES_IN, + 'interval': self.INTERVAL, + } + + self.save_device_credential(request.client_id, request.scope, data) + return 200, data, default_json_headers + + def generate_user_code(self): + """A method to generate ``user_code`` value for device authorization + endpoint. This method will generate a random string like MQNA-JPOZ. + Developers can rewrite this method to create their own ``user_code``. + """ + # https://tools.ietf.org/html/rfc8628#section-6.1 + if self.USER_CODE_TYPE == 'digital': + return create_digital_user_code() + return create_string_user_code() + + def generate_device_code(self): + """A method to generate ``device_code`` value for device authorization + endpoint. This method will generate a random string of 42 characters. + Developers can rewrite this method to create their own ``device_code``. + """ + return generate_token(42) + + def get_verification_uri(self): + """Define the ``verification_uri`` of device authorization endpoint. + Developers MUST implement this method in subclass:: + + def get_verification_uri(self): + return 'https://your-company.com/active' + """ + raise NotImplementedError() + + def save_device_credential(self, client_id, scope, data): + """Save device token into database for later use. Developers MUST + implement this method in subclass:: + + def save_device_credential(self, client_id, scope, data): + item = DeviceCredential( + client_id=client_id, + scope=scope, + **data + ) + item.save() + """ + raise NotImplementedError() + + +def create_string_user_code(): + base = 'BCDFGHJKLMNPQRSTVWXZ' + return '-'.join([generate_token(4, base), generate_token(4, base)]) + + +def create_digital_user_code(): + base = '0123456789' + return '-'.join([ + generate_token(3, base), + generate_token(3, base), + generate_token(3, base), + ]) diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/errors.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/errors.py new file mode 100644 index 0000000..4a63db8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/errors.py @@ -0,0 +1,27 @@ +from ..rfc6749.errors import OAuth2Error + +# https://tools.ietf.org/html/rfc8628#section-3.5 + + +class AuthorizationPendingError(OAuth2Error): + """The authorization request is still pending as the end user hasn't + yet completed the user-interaction steps (Section 3.3). 
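+
+    A polling client simply treats this error as "try again later". For
+    illustration only (``poll_token_endpoint``, ``device_code`` and
+    ``interval`` are assumed names, not part of this module)::
+
+        import time
+
+        while True:
+            data = poll_token_endpoint(device_code)
+            if data.get('error') == 'authorization_pending':
+                time.sleep(interval)
+                continue
+            break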
+ """ + error = 'authorization_pending' + + +class SlowDownError(OAuth2Error): + """A variant of "authorization_pending", the authorization request is + still pending and polling should continue, but the interval MUST + be increased by 5 seconds for this and all subsequent requests. + """ + error = 'slow_down' + + +class ExpiredTokenError(OAuth2Error): + """The "device_code" has expired, and the device authorization + session has concluded. The client MAY commence a new device + authorization request but SHOULD wait for user interaction before + restarting to avoid unnecessary polling. + """ + error = 'expired_token' diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/models.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/models.py new file mode 100644 index 0000000..39eb9a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8628/models.py @@ -0,0 +1,38 @@ +import time + + +class DeviceCredentialMixin: + def get_client_id(self): + raise NotImplementedError() + + def get_scope(self): + raise NotImplementedError() + + def get_user_code(self): + raise NotImplementedError() + + def is_expired(self): + raise NotImplementedError() + + +class DeviceCredentialDict(dict, DeviceCredentialMixin): + def get_client_id(self): + return self['client_id'] + + def get_scope(self): + return self.get('scope') + + def get_user_code(self): + return self['user_code'] + + def get_nonce(self): + return self.get('nonce') + + def get_auth_time(self): + return self.get('auth_time') + + def is_expired(self): + expires_at = self.get('expires_at') + if expires_at: + return expires_at < time.time() + return False diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__init__.py new file mode 100644 index 0000000..1a74f85 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__init__.py @@ -0,0 +1,9 @@ +""" + authlib.oauth2.rfc8693 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents an implementation of + OAuth 2.0 Token Exchange. 
+ + https://tools.ietf.org/html/rfc8693 +""" diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ac20aca Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc8693/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__init__.py new file mode 100644 index 0000000..b914509 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__init__.py @@ -0,0 +1,11 @@ +from .introspection import JWTIntrospectionEndpoint +from .revocation import JWTRevocationEndpoint +from .token import JWTBearerTokenGenerator +from .token_validator import JWTBearerTokenValidator + +__all__ = [ + 'JWTBearerTokenGenerator', + 'JWTBearerTokenValidator', + 'JWTIntrospectionEndpoint', + 'JWTRevocationEndpoint', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1cccb22 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/claims.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/claims.cpython-312.pyc new file mode 100644 index 0000000..7a0e894 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/claims.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/introspection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/introspection.cpython-312.pyc new file mode 100644 index 0000000..9bee9e5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/introspection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/revocation.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/revocation.cpython-312.pyc new file mode 100644 index 0000000..c1eec5f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/revocation.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token.cpython-312.pyc new file mode 100644 index 0000000..39ec41f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token_validator.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token_validator.cpython-312.pyc new file mode 100644 index 0000000..c2d8d60 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/__pycache__/token_validator.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/claims.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/claims.py new file mode 100644 index 
0000000..4dcfea8
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/claims.py
@@ -0,0 +1,62 @@
+from authlib.jose.errors import InvalidClaimError
+from authlib.jose.rfc7519 import JWTClaims
+
+
+class JWTAccessTokenClaims(JWTClaims):
+    REGISTERED_CLAIMS = JWTClaims.REGISTERED_CLAIMS + [
+        'client_id',
+        'auth_time',
+        'acr',
+        'amr',
+        'scope',
+        'groups',
+        'roles',
+        'entitlements',
+    ]
+
+    def validate(self, **kwargs):
+        self.validate_typ()
+
+        super().validate(**kwargs)
+        self.validate_client_id()
+        self.validate_auth_time()
+        self.validate_acr()
+        self.validate_amr()
+        self.validate_scope()
+        self.validate_groups()
+        self.validate_roles()
+        self.validate_entitlements()
+
+    def validate_typ(self):
+        # The resource server MUST verify that the 'typ' header value is 'at+jwt'
+        # or 'application/at+jwt' and reject tokens carrying any other value.
+        if self.header['typ'].lower() not in ('at+jwt', 'application/at+jwt'):
+            raise InvalidClaimError('typ')
+
+    def validate_client_id(self):
+        return self._validate_claim_value('client_id')
+
+    def validate_auth_time(self):
+        auth_time = self.get('auth_time')
+        if auth_time and not isinstance(auth_time, (int, float)):
+            raise InvalidClaimError('auth_time')
+
+    def validate_acr(self):
+        return self._validate_claim_value('acr')
+
+    def validate_amr(self):
+        amr = self.get('amr')
+        if amr and not isinstance(amr, list):
+            raise InvalidClaimError('amr')
+
+    def validate_scope(self):
+        return self._validate_claim_value('scope')
+
+    def validate_groups(self):
+        return self._validate_claim_value('groups')
+
+    def validate_roles(self):
+        return self._validate_claim_value('roles')
+
+    def validate_entitlements(self):
+        return self._validate_claim_value('entitlements')
diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/introspection.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/introspection.py
new file mode 100644
index 0000000..17b5eb5
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/introspection.py
@@ -0,0 +1,126 @@
+from ..rfc7662 import IntrospectionEndpoint
+from authlib.common.errors import ContinueIteration
+from authlib.consts import default_json_headers
+from authlib.jose.errors import ExpiredTokenError
+from authlib.jose.errors import InvalidClaimError
+from authlib.oauth2.rfc6750.errors import InvalidTokenError
+from authlib.oauth2.rfc9068.token_validator import JWTBearerTokenValidator
+
+
+class JWTIntrospectionEndpoint(IntrospectionEndpoint):
+    '''
+    JWTIntrospectionEndpoint inherits from :ref:`specs/rfc7662`
+    :class:`~authlib.oauth2.rfc7662.IntrospectionEndpoint` and implements the machinery
+    to automatically process the JWT access tokens.
+
+    :param issuer: The issuer identifier for which tokens will be introspected.
+
+    :param \*\*kwargs: Other parameters are inherited from
+        :class:`~authlib.oauth2.rfc7662.introspection.IntrospectionEndpoint`.
+
+    ::
+
+        class MyJWTAccessTokenIntrospectionEndpoint(JWTIntrospectionEndpoint):
+            def get_jwks(self):
+                ...
+
+            def get_username(self, user_id):
+                ...
+
+        authorization_server.register_endpoint(
+            MyJWTAccessTokenIntrospectionEndpoint(
+                issuer="https://authorization-server.example.org",
+            )
+        )
+        authorization_server.register_endpoint(MyRefreshTokenIntrospectionEndpoint)
+
+    '''
+
+    #: Endpoint name to be registered
+    ENDPOINT_NAME = 'introspection'
+
+    def __init__(self, issuer, server=None, *args, **kwargs):
+        super().__init__(*args, server=server, **kwargs)
+        self.issuer = issuer
+
+    def create_endpoint_response(self, request):
+        ''''''
+        # The authorization server first validates the client credentials
+        client = self.authenticate_endpoint_client(request)
+
+        # then verifies whether the requesting client is allowed to
+        # introspect the given token
+        token = self.authenticate_token(request, client)
+
+        # and finally builds the introspection response
+        body = self.create_introspection_payload(token)
+        return 200, body, default_json_headers
+
+    def authenticate_token(self, request, client):
+        ''''''
+        self.check_params(request, client)
+
+        # do not attempt to decode refresh_tokens
+        if request.form.get('token_type_hint') not in ('access_token', None):
+            raise ContinueIteration()
+
+        validator = JWTBearerTokenValidator(issuer=self.issuer, resource_server=None)
+        validator.get_jwks = self.get_jwks
+        try:
+            token = validator.authenticate_token(request.form['token'])
+
+        # if the token is not a JWT, fall back to the regular flow
+        except InvalidTokenError:
+            raise ContinueIteration()
+
+        if token and self.check_permission(token, client, request):
+            return token
+
+    def create_introspection_payload(self, token):
+        if not token:
+            return {'active': False}
+
+        try:
+            token.validate()
+        except ExpiredTokenError:
+            return {'active': False}
+        except InvalidClaimError as exc:
+            if exc.claim_name == 'iss':
+                raise ContinueIteration()
+            raise InvalidTokenError()
+
+        payload = {
+            'active': True,
+            'token_type': 'Bearer',
+            'client_id': token['client_id'],
+            'scope': token['scope'],
+            'sub': token['sub'],
+            'aud': token['aud'],
+            'iss': token['iss'],
+            'exp': token['exp'],
+            'iat': token['iat'],
+        }
+
+        if username := self.get_username(token['sub']):
+            payload['username'] = username
+
+        return payload
+
+    def get_jwks(self):
+        '''Return the JWKs that will be used to check the JWT access token signature.
+        Developers MUST re-implement this method::
+
+            def get_jwks(self):
+                return load_jwks("jwks.json")
+        '''
+        raise NotImplementedError()
+
+    def get_username(self, user_id: str) -> str:
+        '''Returns a username from a user ID.
+        Developers MAY re-implement this method::
+
+            def get_username(self, user_id):
+                return User.get(id=user_id).username
+        '''
+        return None
diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/revocation.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/revocation.py
new file mode 100644
index 0000000..9453c79
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/revocation.py
@@ -0,0 +1,70 @@
+from ..rfc6749 import UnsupportedTokenTypeError
+from ..rfc7009 import RevocationEndpoint
+from authlib.common.errors import ContinueIteration
+from authlib.oauth2.rfc6750.errors import InvalidTokenError
+from authlib.oauth2.rfc9068.token_validator import JWTBearerTokenValidator
+
+
+class JWTRevocationEndpoint(RevocationEndpoint):
+    '''JWTRevocationEndpoint inherits from `RFC7009`_
+    :class:`~authlib.oauth2.rfc7009.RevocationEndpoint`.
+
+    JWT access tokens cannot be revoked. If the submitted token is a JWT
+    access token, then revocation returns an ``unsupported_token_type`` error.
+ + :param issuer: The issuer identifier. + + :param \\*\\*kwargs: Other parameters are inherited from + :class:`~authlib.oauth2.rfc7009.RevocationEndpoint`. + + Plain text access tokens and other kind of tokens such as refresh_tokens + will be ignored by this endpoint and passed to the next revocation endpoint:: + + class MyJWTAccessTokenRevocationEndpoint(JWTRevocationEndpoint): + def get_jwks(self): + ... + + authorization_server.register_endpoint( + MyJWTAccessTokenRevocationEndpoint( + issuer="https://authorization-server.example.org", + ) + ) + authorization_server.register_endpoint(MyRefreshTokenRevocationEndpoint) + + .. _RFC7009: https://tools.ietf.org/html/rfc7009 + ''' + + def __init__(self, issuer, server=None, *args, **kwargs): + super().__init__(*args, server=server, **kwargs) + self.issuer = issuer + + def authenticate_token(self, request, client): + '''''' + self.check_params(request, client) + + # do not attempt to revoke refresh_tokens + if request.form.get('token_type_hint') not in ('access_token', None): + raise ContinueIteration() + + validator = JWTBearerTokenValidator(issuer=self.issuer, resource_server=None) + validator.get_jwks = self.get_jwks + + try: + validator.authenticate_token(request.form['token']) + + # if the token is not a JWT, fall back to the regular flow + except InvalidTokenError: + raise ContinueIteration() + + # JWT access token cannot be revoked + raise UnsupportedTokenTypeError() + + def get_jwks(self): + '''Return the JWKs that will be used to check the JWT access token signature. + Developers MUST re-implement this method:: + + def get_jwks(self): + return load_jwks("jwks.json") + ''' + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token.py new file mode 100644 index 0000000..6751b88 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token.py @@ -0,0 +1,218 @@ +import time +from typing import List +from typing import Optional +from typing import Union + +from authlib.common.security import generate_token +from authlib.jose import jwt +from authlib.oauth2.rfc6750.token import BearerTokenGenerator + + +class JWTBearerTokenGenerator(BearerTokenGenerator): + '''A JWT formatted access token generator. + + :param issuer: The issuer identifier. Will appear in the JWT ``iss`` claim. + + :param \\*\\*kwargs: Other parameters are inherited from + :class:`~authlib.oauth2.rfc6750.token.BearerTokenGenerator`. + + This token generator can be registered into the authorization server:: + + class MyJWTBearerTokenGenerator(JWTBearerTokenGenerator): + def get_jwks(self): + ... + + def get_extra_claims(self, client, grant_type, user, scope): + ... + + authorization_server.register_token_generator( + 'default', + MyJWTBearerTokenGenerator(issuer='https://authorization-server.example.org'), + ) + ''' + + def __init__( + self, + issuer, + alg='RS256', + refresh_token_generator=None, + expires_generator=None, + ): + super().__init__( + self.access_token_generator, refresh_token_generator, expires_generator + ) + self.issuer = issuer + self.alg = alg + + def get_jwks(self): + '''Return the JWKs that will be used to sign the JWT access token. + Developers MUST re-implement this method:: + + def get_jwks(self): + return load_jwks("jwks.json") + ''' + raise NotImplementedError() + + def get_extra_claims(self, client, grant_type, user, scope): + '''Return extra claims to add in the JWT access token. 
Developers MAY
+        re-implement this method to add identity claims like the ones in
+        :ref:`specs/oidc` ID Token, or any other arbitrary claims::
+
+            def get_extra_claims(self, client, grant_type, user, scope):
+                return generate_user_info(user, scope)
+        '''
+        return {}
+
+    def get_audiences(self, client, user, scope) -> Union[str, List[str]]:
+        '''Return the audience for the token. By default this simply returns
+        the client ID. Developers MAY re-implement this method to add extra
+        audiences::
+
+            def get_audiences(self, client, user, scope):
+                return [
+                    client.get_client_id(),
+                    resource_server.get_id(),
+                ]
+        '''
+        return client.get_client_id()
+
+    def get_acr(self, user) -> Optional[str]:
+        '''Authentication Context Class Reference.
+        Returns a user-defined case-sensitive string indicating the class of
+        authentication the user performed. The token audience may refuse to give
+        access to some resources if some ACR criteria are not met.
+        :ref:`specs/oidc` defines one special value: ``0`` means that the user
+        authentication did not respect `ISO29115`_ level 1, and will be refused
+        monetary operations. Developers MAY re-implement this method::
+
+            def get_acr(self, user):
+                if user.insecure_session():
+                    return '0'
+                return 'urn:mace:incommon:iap:silver'
+
+        .. _ISO29115: https://www.iso.org/standard/45138.html
+        '''
+        return None
+
+    def get_auth_time(self, user) -> Optional[int]:
+        '''User authentication time.
+        Time when the End-User authentication occurred. Its value is a JSON number
+        representing the number of seconds from 1970-01-01T0:0:0Z as measured in UTC
+        until the date/time. Developers MAY re-implement this method::
+
+            def get_auth_time(self, user):
+                return datetime.timestamp(user.get_auth_time())
+        '''
+        return None
+
+    def get_amr(self, user) -> Optional[List[str]]:
+        '''Authentication Methods References.
+        Defined by :ref:`specs/oidc` as an optional list of user-defined
+        case-sensitive strings indicating which authentication methods have been
+        used to authenticate the user. Developers MAY re-implement this method::
+
+            def get_amr(self, user):
+                return ['2FA'] if user.has_2fa_enabled() else []
+        '''
+        return None
+
+    def get_jti(self, client, grant_type, user, scope) -> str:
+        '''JWT ID.
+        Create a unique identifier for the token. Developers MAY re-implement
+        this method::
+
+            def get_jti(self, client, grant_type, user, scope):
+                return generate_random_string(16)
+        '''
+        return generate_token(16)
+
+    def access_token_generator(self, client, grant_type, user, scope):
+        now = int(time.time())
+        expires_in = now + self._get_expires_in(client, grant_type)
+
+        token_data = {
+            'iss': self.issuer,
+            'exp': expires_in,
+            'client_id': client.get_client_id(),
+            'iat': now,
+            'jti': self.get_jti(client, grant_type, user, scope),
+            'scope': scope,
+        }
+
+        # In cases of access tokens obtained through grants where a resource owner is
+        # involved, such as the authorization code grant, the value of 'sub' SHOULD
+        # correspond to the subject identifier of the resource owner.
+
+        if user:
+            token_data['sub'] = user.get_user_id()
+
+        # In cases of access tokens obtained through grants where no resource owner is
+        # involved, such as the client credentials grant, the value of 'sub' SHOULD
+        # correspond to an identifier the authorization server uses to indicate the
+        # client application.
+ + else: + token_data['sub'] = client.get_client_id() + + # If the request includes a 'resource' parameter (as defined in [RFC8707]), the + # resulting JWT access token 'aud' claim SHOULD have the same value as the + # 'resource' parameter in the request. + + # TODO: Implement this with RFC8707 + if False: # pragma: no cover + ... + + # If the request does not include a 'resource' parameter, the authorization + # server MUST use a default resource indicator in the 'aud' claim. If a 'scope' + # parameter is present in the request, the authorization server SHOULD use it to + # infer the value of the default resource indicator to be used in the 'aud' + # claim. The mechanism through which scopes are associated with default resource + # indicator values is outside the scope of this specification. + + else: + token_data['aud'] = self.get_audiences(client, user, scope) + + # If the values in the 'scope' parameter refer to different default resource + # indicator values, the authorization server SHOULD reject the request with + # 'invalid_scope' as described in Section 4.1.2.1 of [RFC6749]. + # TODO: Implement this with RFC8707 + + if auth_time := self.get_auth_time(user): + token_data['auth_time'] = auth_time + + # The meaning and processing of acr Claim Values is out of scope for this + # specification. + + if acr := self.get_acr(user): + token_data['acr'] = acr + + # The definition of particular values to be used in the amr Claim is beyond the + # scope of this specification. + + if amr := self.get_amr(user): + token_data['amr'] = amr + + # Authorization servers MAY return arbitrary attributes not defined in any + # existing specification, as long as the corresponding claim names are collision + # resistant or the access tokens are meant to be used only within a private + # subsystem. Please refer to Sections 4.2 and 4.3 of [RFC7519] for details. + + token_data.update(self.get_extra_claims(client, grant_type, user, scope)) + + # This specification registers the 'application/at+jwt' media type, which can + # be used to indicate that the content is a JWT access token. JWT access tokens + # MUST include this media type in the 'typ' header parameter to explicitly + # declare that the JWT represents an access token complying with this profile. + # Per the definition of 'typ' in Section 4.1.9 of [RFC7515], it is RECOMMENDED + # that the 'application/' prefix be omitted. Therefore, the 'typ' value used + # SHOULD be 'at+jwt'. + + header = {'alg': self.alg, 'typ': 'at+jwt'} + + access_token = jwt.encode( + header, + token_data, + key=self.get_jwks(), + check=False, + ) + return access_token.decode() diff --git a/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token_validator.py b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token_validator.py new file mode 100644 index 0000000..dc152e2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oauth2/rfc9068/token_validator.py @@ -0,0 +1,163 @@ +''' + authlib.oauth2.rfc9068.token_validator + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation of Validating JWT Access Tokens per `Section 4`_. + + .. 
_`Section 4`: https://www.rfc-editor.org/rfc/rfc9068.html#name-validating-jwt-access-token
+'''
+from authlib.jose import jwt
+from authlib.jose.errors import DecodeError
+from authlib.jose.errors import JoseError
+from authlib.oauth2.rfc6750.errors import InsufficientScopeError
+from authlib.oauth2.rfc6750.errors import InvalidTokenError
+from authlib.oauth2.rfc6750.validator import BearerTokenValidator
+from .claims import JWTAccessTokenClaims
+
+
+class JWTBearerTokenValidator(BearerTokenValidator):
+    '''JWTBearerTokenValidator can protect your resource server endpoints.
+
+    :param issuer: The issuer from which tokens will be accepted.
+    :param resource_server: An identifier for the current resource server,
+        which must appear in the JWT ``aud`` claim.
+
+    Developers need to implement the missing methods::
+
+        class MyJWTBearerTokenValidator(JWTBearerTokenValidator):
+            def get_jwks(self):
+                ...
+
+        require_oauth = ResourceProtector()
+        require_oauth.register_token_validator(
+            MyJWTBearerTokenValidator(
+                issuer='https://authorization-server.example.org',
+                resource_server='https://resource-server.example.org',
+            )
+        )
+
+    You can then protect resources depending on the JWT `scope`, `groups`,
+    `roles` or `entitlements` claims::
+
+        @require_oauth(
+            scope='profile',
+            groups='admins',
+            roles='student',
+            entitlements='captain',
+        )
+        def resource_endpoint():
+            ...
+    '''
+
+    def __init__(self, issuer, resource_server, *args, **kwargs):
+        self.issuer = issuer
+        self.resource_server = resource_server
+        super().__init__(*args, **kwargs)
+
+    def get_jwks(self):
+        '''Return the JWKs that will be used to check the JWT access token signature.
+        Developers MUST re-implement this method. Typically the JWKs are statically
+        stored in the resource server configuration, or dynamically downloaded and
+        cached using :ref:`specs/rfc8414`::
+
+            def get_jwks(self):
+                if 'jwks' in cache:
+                    return cache.get('jwks')
+
+                server_metadata = get_server_metadata(self.issuer)
+                jwks_uri = server_metadata.get('jwks_uri')
+                cache['jwks'] = requests.get(jwks_uri).json()
+                return cache['jwks']
+        '''
+        raise NotImplementedError()
+
+    def validate_iss(self, claims, iss: str) -> bool:
+        # The issuer identifier for the authorization server (which is typically
+        # obtained during discovery) MUST exactly match the value of the 'iss'
+        # claim.
+        return iss == self.issuer
+
+    def authenticate_token(self, token_string):
+        ''''''
+        # an empty docstring avoids displaying the irrelevant parent docstring
+
+        claims_options = {
+            'iss': {'essential': True, 'validate': self.validate_iss},
+            'exp': {'essential': True},
+            'aud': {'essential': True, 'value': self.resource_server},
+            'sub': {'essential': True},
+            'client_id': {'essential': True},
+            'iat': {'essential': True},
+            'jti': {'essential': True},
+            'auth_time': {'essential': False},
+            'acr': {'essential': False},
+            'amr': {'essential': False},
+            'scope': {'essential': False},
+            'groups': {'essential': False},
+            'roles': {'essential': False},
+            'entitlements': {'essential': False},
+        }
+        jwks = self.get_jwks()
+
+        # If the JWT access token is encrypted, decrypt it using the keys and algorithms
+        # that the resource server specified during registration. If encryption was
+        # negotiated with the authorization server at registration time and the incoming
+        # JWT access token is not encrypted, the resource server SHOULD reject it.
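+
+        # Note: no JWE decryption is attempted here; only signed (JWS) access
+        # tokens can be verified with the authorization server keys below.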
+
+        # The resource server MUST validate the signature of all incoming JWT access
+        # tokens according to [RFC7515] using the algorithm specified in the JWT 'alg'
+        # Header Parameter. The resource server MUST reject any JWT in which the value
+        # of 'alg' is 'none'. The resource server MUST use the keys provided by the
+        # authorization server.
+        try:
+            return jwt.decode(
+                token_string,
+                key=jwks,
+                claims_cls=JWTAccessTokenClaims,
+                claims_options=claims_options,
+            )
+        except DecodeError:
+            raise InvalidTokenError(
+                realm=self.realm, extra_attributes=self.extra_attributes
+            )
+
+    def validate_token(
+        self, token, scopes, request, groups=None, roles=None, entitlements=None
+    ):
+        ''''''
+        # an empty docstring avoids displaying the irrelevant parent docstring
+        try:
+            token.validate()
+        except JoseError as exc:
+            raise InvalidTokenError(
+                realm=self.realm, extra_attributes=self.extra_attributes
+            ) from exc
+
+        # If an authorization request includes a scope parameter, the corresponding
+        # issued JWT access token SHOULD include a 'scope' claim as defined in Section
+        # 4.2 of [RFC8693]. All the individual scope strings in the 'scope' claim MUST
+        # have meaning for the resources indicated in the 'aud' claim. See Section 5 for
+        # more considerations about the relationship between scope strings and resources
+        # indicated by the 'aud' claim.
+
+        if self.scope_insufficient(token.get('scope', []), scopes):
+            raise InsufficientScopeError()
+
+        # Many authorization servers embed authorization attributes that go beyond the
+        # delegated scenarios described by [RFC7519] in the access tokens they issue.
+        # Typical examples include resource owner memberships in roles and groups that
+        # are relevant to the resource being accessed, entitlements assigned to the
+        # resource owner for the targeted resource that the authorization server knows
+        # about, and so on. An authorization server wanting to include such attributes
+        # in a JWT access token SHOULD use the 'groups', 'roles', and 'entitlements'
+        # attributes of the 'User' resource schema defined by Section 4.1.2 of
+        # [RFC7643] as claim types.
+
+        if self.scope_insufficient(token.get('groups'), groups):
+            raise InvalidTokenError()
+
+        if self.scope_insufficient(token.get('roles'), roles):
+            raise InvalidTokenError()
+
+        if self.scope_insufficient(token.get('entitlements'), entitlements):
+            raise InvalidTokenError()
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oidc/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..3366810
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__init__.py
new file mode 100644
index 0000000..212ebc0
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__init__.py
@@ -0,0 +1,23 @@
+"""
+    authlib.oidc.core
+    ~~~~~~~~~~~~~~~~~
+
+    OpenID Connect Core 1.0 Implementation.
+ + http://openid.net/specs/openid-connect-core-1_0.html +""" + +from .models import AuthorizationCodeMixin +from .claims import ( + IDToken, CodeIDToken, ImplicitIDToken, HybridIDToken, + UserInfo, get_claim_cls_by_response_type, +) +from .grants import OpenIDToken, OpenIDCode, OpenIDHybridGrant, OpenIDImplicitGrant + + +__all__ = [ + 'AuthorizationCodeMixin', + 'IDToken', 'CodeIDToken', 'ImplicitIDToken', 'HybridIDToken', + 'UserInfo', 'get_claim_cls_by_response_type', + 'OpenIDToken', 'OpenIDCode', 'OpenIDHybridGrant', 'OpenIDImplicitGrant', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ed5efb8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/claims.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/claims.cpython-312.pyc new file mode 100644 index 0000000..ddc5ae9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/claims.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/errors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..28bbd0f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/errors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..89680d4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..06d06ba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/claims.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/claims.py new file mode 100644 index 0000000..f867458 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/claims.py @@ -0,0 +1,242 @@ +import time +import hmac +from authlib.common.encoding import to_bytes +from authlib.jose import JWTClaims +from authlib.jose.errors import ( + MissingClaimError, + InvalidClaimError, +) +from .util import create_half_hash + +__all__ = [ + 'IDToken', 'CodeIDToken', 'ImplicitIDToken', 'HybridIDToken', + 'UserInfo', 'get_claim_cls_by_response_type' +] + +_REGISTERED_CLAIMS = [ + 'iss', 'sub', 'aud', 'exp', 'nbf', 'iat', + 'auth_time', 'nonce', 'acr', 'amr', 'azp', + 'at_hash', +] + + +class IDToken(JWTClaims): + ESSENTIAL_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'iat'] + + def validate(self, now=None, leeway=0): + for k in self.ESSENTIAL_CLAIMS: + if k not in self: + raise MissingClaimError(k) + + self._validate_essential_claims() + if now is None: + now = int(time.time()) + + self.validate_iss() + self.validate_sub() + self.validate_aud() + self.validate_exp(now, leeway) + self.validate_nbf(now, leeway) + 
self.validate_iat(now, leeway) + self.validate_auth_time() + self.validate_nonce() + self.validate_acr() + self.validate_amr() + self.validate_azp() + self.validate_at_hash() + + def validate_auth_time(self): + """Time when the End-User authentication occurred. Its value is a JSON + number representing the number of seconds from 1970-01-01T0:0:0Z as + measured in UTC until the date/time. When a max_age request is made or + when auth_time is requested as an Essential Claim, then this Claim is + REQUIRED; otherwise, its inclusion is OPTIONAL. + """ + auth_time = self.get('auth_time') + if self.params.get('max_age') and not auth_time: + raise MissingClaimError('auth_time') + + if auth_time and not isinstance(auth_time, (int, float)): + raise InvalidClaimError('auth_time') + + def validate_nonce(self): + """String value used to associate a Client session with an ID Token, + and to mitigate replay attacks. The value is passed through unmodified + from the Authentication Request to the ID Token. If present in the ID + Token, Clients MUST verify that the nonce Claim Value is equal to the + value of the nonce parameter sent in the Authentication Request. If + present in the Authentication Request, Authorization Servers MUST + include a nonce Claim in the ID Token with the Claim Value being the + nonce value sent in the Authentication Request. Authorization Servers + SHOULD perform no other processing on nonce values used. The nonce + value is a case sensitive string. + """ + nonce_value = self.params.get('nonce') + if nonce_value: + if 'nonce' not in self: + raise MissingClaimError('nonce') + if nonce_value != self['nonce']: + raise InvalidClaimError('nonce') + + def validate_acr(self): + """OPTIONAL. Authentication Context Class Reference. String specifying + an Authentication Context Class Reference value that identifies the + Authentication Context Class that the authentication performed + satisfied. The value "0" indicates the End-User authentication did not + meet the requirements of `ISO/IEC 29115`_ level 1. Authentication + using a long-lived browser cookie, for instance, is one example where + the use of "level 0" is appropriate. Authentications with level 0 + SHOULD NOT be used to authorize access to any resource of any monetary + value. An absolute URI or an `RFC 6711`_ registered name SHOULD be + used as the acr value; registered names MUST NOT be used with a + different meaning than that which is registered. Parties using this + claim will need to agree upon the meanings of the values used, which + may be context-specific. The acr value is a case sensitive string. + + .. _`ISO/IEC 29115`: https://www.iso.org/standard/45138.html + .. _`RFC 6711`: https://tools.ietf.org/html/rfc6711 + """ + return self._validate_claim_value('acr') + + def validate_amr(self): + """OPTIONAL. Authentication Methods References. JSON array of strings + that are identifiers for authentication methods used in the + authentication. For instance, values might indicate that both password + and OTP authentication methods were used. The definition of particular + values to be used in the amr Claim is beyond the scope of this + specification. Parties using this claim will need to agree upon the + meanings of the values used, which may be context-specific. The amr + value is an array of case sensitive strings. + """ + amr = self.get('amr') + if amr and not isinstance(self['amr'], list): + raise InvalidClaimError('amr') + + def validate_azp(self): + """OPTIONAL. 
Authorized party - the party to which the ID Token was + issued. If present, it MUST contain the OAuth 2.0 Client ID of this + party. This Claim is only needed when the ID Token has a single + audience value and that audience is different than the authorized + party. It MAY be included even when the authorized party is the same + as the sole audience. The azp value is a case sensitive string + containing a StringOrURI value. + """ + aud = self.get('aud') + client_id = self.params.get('client_id') + required = False + if aud and client_id: + if isinstance(aud, list) and len(aud) == 1: + aud = aud[0] + if aud != client_id: + required = True + + azp = self.get('azp') + if required and not azp: + raise MissingClaimError('azp') + + if azp and client_id and azp != client_id: + raise InvalidClaimError('azp') + + def validate_at_hash(self): + """OPTIONAL. Access Token hash value. Its value is the base64url + encoding of the left-most half of the hash of the octets of the ASCII + representation of the access_token value, where the hash algorithm + used is the hash algorithm used in the alg Header Parameter of the + ID Token's JOSE Header. For instance, if the alg is RS256, hash the + access_token value with SHA-256, then take the left-most 128 bits and + base64url encode them. The at_hash value is a case sensitive string. + """ + access_token = self.params.get('access_token') + at_hash = self.get('at_hash') + if at_hash and access_token: + if not _verify_hash(at_hash, access_token, self.header['alg']): + raise InvalidClaimError('at_hash') + + +class CodeIDToken(IDToken): + RESPONSE_TYPES = ('code',) + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + + +class ImplicitIDToken(IDToken): + RESPONSE_TYPES = ('id_token', 'id_token token') + ESSENTIAL_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'iat', 'nonce'] + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + + def validate_at_hash(self): + """If the ID Token is issued from the Authorization Endpoint with an + access_token value, which is the case for the response_type value + id_token token, this is REQUIRED; it MAY NOT be used when no Access + Token is issued, which is the case for the response_type value + id_token. + """ + access_token = self.params.get('access_token') + if access_token and 'at_hash' not in self: + raise MissingClaimError('at_hash') + super().validate_at_hash() + + +class HybridIDToken(ImplicitIDToken): + RESPONSE_TYPES = ('code id_token', 'code token', 'code id_token token') + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + ['c_hash'] + + def validate(self, now=None, leeway=0): + super().validate(now=now, leeway=leeway) + self.validate_c_hash() + + def validate_c_hash(self): + """Code hash value. Its value is the base64url encoding of the + left-most half of the hash of the octets of the ASCII representation + of the code value, where the hash algorithm used is the hash algorithm + used in the alg Header Parameter of the ID Token's JOSE Header. For + instance, if the alg is HS512, hash the code value with SHA-512, then + take the left-most 256 bits and base64url encode them. The c_hash + value is a case sensitive string. + If the ID Token is issued from the Authorization Endpoint with a code, + which is the case for the response_type values code id_token and code + id_token token, this is REQUIRED; otherwise, its inclusion is OPTIONAL. 
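+        For illustration, a minimal sketch of this computation for an RS256-signed
+        ID Token (so the hash algorithm is SHA-256; the code value is a made-up
+        example)::
+
+            import base64
+            import hashlib
+
+            code = 'SplxlOBeZQQYbYS6WxSbIA'  # value returned to the client
+            digest = hashlib.sha256(code.encode('ascii')).digest()
+            c_hash = base64.urlsafe_b64encode(digest[:len(digest) // 2])
+            c_hash = c_hash.rstrip(b'=').decode('ascii')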
+ """ + code = self.params.get('code') + c_hash = self.get('c_hash') + if code: + if not c_hash: + raise MissingClaimError('c_hash') + if not _verify_hash(c_hash, code, self.header['alg']): + raise InvalidClaimError('c_hash') + + +class UserInfo(dict): + """The standard claims of a UserInfo object. Defined per `Section 5.1`_. + + .. _`Section 5.1`: http://openid.net/specs/openid-connect-core-1_0.html#StandardClaims + """ + + #: registered claims that UserInfo supports + REGISTERED_CLAIMS = [ + 'sub', 'name', 'given_name', 'family_name', 'middle_name', 'nickname', + 'preferred_username', 'profile', 'picture', 'website', 'email', + 'email_verified', 'gender', 'birthdate', 'zoneinfo', 'locale', + 'phone_number', 'phone_number_verified', 'address', 'updated_at', + ] + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTERED_CLAIMS: + return self.get(key) + raise error + + +def get_claim_cls_by_response_type(response_type): + claims_classes = (CodeIDToken, ImplicitIDToken, HybridIDToken) + for claims_cls in claims_classes: + if response_type in claims_cls.RESPONSE_TYPES: + return claims_cls + + +def _verify_hash(signature, s, alg): + hash_value = create_half_hash(s, alg) + if not hash_value: + return True + return hmac.compare_digest(hash_value, to_bytes(signature)) diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/errors.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/errors.py new file mode 100644 index 0000000..e5fb630 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/errors.py @@ -0,0 +1,78 @@ +from authlib.oauth2 import OAuth2Error + + +class InteractionRequiredError(OAuth2Error): + """The Authorization Server requires End-User interaction of some form + to proceed. This error MAY be returned when the prompt parameter value + in the Authentication Request is none, but the Authentication Request + cannot be completed without displaying a user interface for End-User + interaction. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'interaction_required' + + +class LoginRequiredError(OAuth2Error): + """The Authorization Server requires End-User authentication. This error + MAY be returned when the prompt parameter value in the Authentication + Request is none, but the Authentication Request cannot be completed + without displaying a user interface for End-User authentication. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'login_required' + + +class AccountSelectionRequiredError(OAuth2Error): + """The End-User is REQUIRED to select a session at the Authorization + Server. The End-User MAY be authenticated at the Authorization Server + with different associated accounts, but the End-User did not select a + session. This error MAY be returned when the prompt parameter value in + the Authentication Request is none, but the Authentication Request cannot + be completed without displaying a user interface to prompt for a session + to use. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'account_selection_required' + + +class ConsentRequiredError(OAuth2Error): + """The Authorization Server requires End-User consent. This error MAY be + returned when the prompt parameter value in the Authentication Request is + none, but the Authentication Request cannot be completed without + displaying a user interface for End-User consent. 
+ + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'consent_required' + + +class InvalidRequestURIError(OAuth2Error): + """The request_uri in the Authorization Request returns an error or + contains invalid data. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'invalid_request_uri' + + +class InvalidRequestObjectError(OAuth2Error): + """The request parameter contains an invalid Request Object.""" + error = 'invalid_request_object' + + +class RequestNotSupportedError(OAuth2Error): + """The OP does not support use of the request parameter.""" + error = 'request_not_supported' + + +class RequestURINotSupportedError(OAuth2Error): + """The OP does not support use of the request_uri parameter.""" + error = 'request_uri_not_supported' + + +class RegistrationNotSupportedError(OAuth2Error): + """The OP does not support use of the registration parameter.""" + error = 'registration_not_supported' diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__init__.py new file mode 100644 index 0000000..8b4b002 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__init__.py @@ -0,0 +1,10 @@ +from .code import OpenIDToken, OpenIDCode +from .implicit import OpenIDImplicitGrant +from .hybrid import OpenIDHybridGrant + +__all__ = [ + 'OpenIDToken', + 'OpenIDCode', + 'OpenIDImplicitGrant', + 'OpenIDHybridGrant', +] diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..39ccac1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/code.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/code.cpython-312.pyc new file mode 100644 index 0000000..b745aad Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/code.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/hybrid.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/hybrid.cpython-312.pyc new file mode 100644 index 0000000..5927402 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/hybrid.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/implicit.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/implicit.cpython-312.pyc new file mode 100644 index 0000000..c3c7fde Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/implicit.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..c860a54 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/code.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/code.py 
new file mode 100644
index 0000000..9ac3bfb
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/code.py
@@ -0,0 +1,142 @@
+"""
+    authlib.oidc.core.grants.code
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Implementation of Authentication using the Authorization Code Flow
+    per `Section 3.1`_.
+
+    .. _`Section 3.1`: http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth
+"""
+
+import logging
+from authlib.oauth2.rfc6749 import OAuth2Request
+from .util import (
+    is_openid_scope,
+    validate_nonce,
+    validate_request_prompt,
+    generate_id_token,
+)
+
+log = logging.getLogger(__name__)
+
+
+class OpenIDToken:
+    def get_jwt_config(self, grant):  # pragma: no cover
+        """Get the JWT configuration for OpenIDCode extension. The JWT
+        configuration will be used to generate ``id_token``. Developers
+        MUST implement this method in subclass, e.g.::
+
+            def get_jwt_config(self, grant):
+                return {
+                    'key': read_private_key_file(key_path),
+                    'alg': 'RS256',
+                    'iss': 'issuer-identity',
+                    'exp': 3600
+                }
+
+        :param grant: AuthorizationCodeGrant instance
+        :return: dict
+        """
+        raise NotImplementedError()
+
+    def generate_user_info(self, user, scope):
+        """Provide user information for the given scope. Developers
+        MUST implement this method in subclass, e.g.::
+
+            from authlib.oidc.core import UserInfo
+
+            def generate_user_info(self, user, scope):
+                user_info = UserInfo(sub=user.id, name=user.name)
+                if 'email' in scope:
+                    user_info['email'] = user.email
+                return user_info
+
+        :param user: user instance
+        :param scope: scope of the token
+        :return: ``authlib.oidc.core.UserInfo`` instance
+        """
+        raise NotImplementedError()
+
+    def get_audiences(self, request):
+        """Parse `aud` value for id_token, default value is client id. Developers
+        MAY rewrite this method to provide a customized audience value.
+        """
+        client = request.client
+        return [client.get_client_id()]
+
+    def process_token(self, grant, token):
+        scope = token.get('scope')
+        if not scope or not is_openid_scope(scope):
+            # standard authorization code flow
+            return token
+
+        request: OAuth2Request = grant.request
+        authorization_code = request.authorization_code
+
+        config = self.get_jwt_config(grant)
+        config['aud'] = self.get_audiences(request)
+
+        if authorization_code:
+            config['nonce'] = authorization_code.get_nonce()
+            config['auth_time'] = authorization_code.get_auth_time()
+
+        user_info = self.generate_user_info(request.user, token['scope'])
+        id_token = generate_id_token(token, user_info, **config)
+        token['id_token'] = id_token
+        return token
+
+    def __call__(self, grant):
+        grant.register_hook('process_token', self.process_token)
+
+
+class OpenIDCode(OpenIDToken):
+    """An extension from OpenID Connect for "grant_type=code" request. Developers
+    MUST implement the missing methods::
+
+        class MyOpenIDCode(OpenIDCode):
+            def get_jwt_config(self, grant):
+                return {...}
+
+            def exists_nonce(self, nonce, request):
+                return check_if_nonce_in_cache(request.client_id, nonce)
+
+            def generate_user_info(self, user, scope):
+                return {...}
+
+    Then register this extension with AuthorizationCodeGrant::
+
+        authorization_server.register_grant(AuthorizationCodeGrant, extensions=[MyOpenIDCode()])
+    """
+    def __init__(self, require_nonce=False):
+        self.require_nonce = require_nonce
+
+    def exists_nonce(self, nonce, request):
+        """Check if the given nonce exists in your database.
Developers
+        MUST implement this method in subclass, e.g.::
+
+            def exists_nonce(self, nonce, request):
+                exists = AuthorizationCode.query.filter_by(
+                    client_id=request.client_id, nonce=nonce
+                ).first()
+                return bool(exists)
+
+        :param nonce: A string of "nonce" parameter in request
+        :param request: OAuth2Request instance
+        :return: Boolean
+        """
+        raise NotImplementedError()
+
+    def validate_openid_authorization_request(self, grant):
+        validate_nonce(grant.request, self.exists_nonce, self.require_nonce)
+
+    def __call__(self, grant):
+        grant.register_hook('process_token', self.process_token)
+        if is_openid_scope(grant.request.scope):
+            grant.register_hook(
+                'after_validate_authorization_request',
+                self.validate_openid_authorization_request
+            )
+            grant.register_hook(
+                'after_validate_consent_request',
+                validate_request_prompt
+            )
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/hybrid.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/hybrid.py
new file mode 100644
index 0000000..384c867
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/hybrid.py
@@ -0,0 +1,90 @@
+import logging
+from authlib.common.security import generate_token
+from authlib.oauth2.rfc6749 import InvalidScopeError
+from authlib.oauth2.rfc6749.grants.authorization_code import (
+    validate_code_authorization_request
+)
+from .implicit import OpenIDImplicitGrant
+from .util import is_openid_scope, validate_nonce
+
+log = logging.getLogger(__name__)
+
+
+class OpenIDHybridGrant(OpenIDImplicitGrant):
+    #: Generated "code" length
+    AUTHORIZATION_CODE_LENGTH = 48
+
+    RESPONSE_TYPES = {'code id_token', 'code token', 'code id_token token'}
+    GRANT_TYPE = 'code'
+    DEFAULT_RESPONSE_MODE = 'fragment'
+
+    def generate_authorization_code(self):
+        """The method to generate a "code" value for authorization code data.
+        Developers may rewrite this method, or customize the code length with::
+
+            class MyAuthorizationCodeGrant(AuthorizationCodeGrant):
+                AUTHORIZATION_CODE_LENGTH = 32  # default is 48
+        """
+        return generate_token(self.AUTHORIZATION_CODE_LENGTH)
+
+    def save_authorization_code(self, code, request):
+        """Save authorization_code for later use. Developers MUST implement
+        it in subclass.
Here is an example::
+
+            def save_authorization_code(self, code, request):
+                client = request.client
+                auth_code = AuthorizationCode(
+                    code=code,
+                    client_id=client.client_id,
+                    redirect_uri=request.redirect_uri,
+                    scope=request.scope,
+                    nonce=request.data.get('nonce'),
+                    user_id=request.user.id,
+                )
+                auth_code.save()
+        """
+        raise NotImplementedError()
+
+    def validate_authorization_request(self):
+        if not is_openid_scope(self.request.scope):
+            raise InvalidScopeError(
+                'Missing "openid" scope',
+                redirect_uri=self.request.redirect_uri,
+                redirect_fragment=True,
+            )
+        self.register_hook(
+            'after_validate_authorization_request',
+            lambda grant: validate_nonce(
+                grant.request, grant.exists_nonce, required=True)
+        )
+        return validate_code_authorization_request(self)
+
+    def create_granted_params(self, grant_user):
+        self.request.user = grant_user
+        client = self.request.client
+        code = self.generate_authorization_code()
+        self.save_authorization_code(code, self.request)
+        params = [('code', code)]
+        token = self.generate_token(
+            grant_type='implicit',
+            user=grant_user,
+            scope=self.request.scope,
+            include_refresh_token=False
+        )
+
+        response_types = self.request.response_type.split()
+        if 'token' in response_types:
+            log.debug('Grant token %r to %r', token, client)
+            self.server.save_token(token, self.request)
+            if 'id_token' in response_types:
+                token = self.process_implicit_token(token, code)
+        else:
+            # response_type is "code id_token"
+            token = {
+                'expires_in': token['expires_in'],
+                'scope': token['scope']
+            }
+            token = self.process_implicit_token(token, code)
+
+        params.extend([(k, token[k]) for k in token])
+        return params
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/implicit.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/implicit.py
new file mode 100644
index 0000000..15bc1fa
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/implicit.py
@@ -0,0 +1,150 @@
+import logging
+from authlib.oauth2.rfc6749 import (
+    OAuth2Error,
+    InvalidScopeError,
+    AccessDeniedError,
+    ImplicitGrant,
+)
+from .util import (
+    is_openid_scope,
+    validate_nonce,
+    validate_request_prompt,
+    create_response_mode_response,
+    generate_id_token,
+)
+
+log = logging.getLogger(__name__)
+
+
+class OpenIDImplicitGrant(ImplicitGrant):
+    RESPONSE_TYPES = {'id_token token', 'id_token'}
+    DEFAULT_RESPONSE_MODE = 'fragment'
+
+    def exists_nonce(self, nonce, request):
+        """Check if the given nonce exists in your database. Developers
+        should implement this method in subclass, e.g.::
+
+            def exists_nonce(self, nonce, request):
+                exists = AuthorizationCode.query.filter_by(
+                    client_id=request.client_id, nonce=nonce
+                ).first()
+                return bool(exists)
+
+        :param nonce: A string of "nonce" parameter in request
+        :param request: OAuth2Request instance
+        :return: Boolean
+        """
+        raise NotImplementedError()
+
+    def get_jwt_config(self):
+        """Get the JWT configuration for OpenIDImplicitGrant. The JWT
+        configuration will be used to generate ``id_token``. Developers
+        MUST implement this method in subclass, e.g.::
+
+            def get_jwt_config(self):
+                return {
+                    'key': read_private_key_file(key_path),
+                    'alg': 'RS256',
+                    'iss': 'issuer-identity',
+                    'exp': 3600
+                }
+
+        :return: dict
+        """
+        raise NotImplementedError()
+
+    def generate_user_info(self, user, scope):
+        """Provide user information for the given scope.
Developers + MUST implement this method in subclass, e.g.:: + + from authlib.oidc.core import UserInfo + + def generate_user_info(self, user, scope): + user_info = UserInfo(sub=user.id, name=user.name) + if 'email' in scope: + user_info['email'] = user.email + return user_info + + :param user: user instance + :param scope: scope of the token + :return: ``authlib.oidc.core.UserInfo`` instance + """ + raise NotImplementedError() + + def get_audiences(self, request): + """Parse `aud` value for id_token, default value is client id. Developers + MAY rewrite this method to provide a customized audience value. + """ + client = request.client + return [client.get_client_id()] + + def validate_authorization_request(self): + if not is_openid_scope(self.request.scope): + raise InvalidScopeError( + 'Missing "openid" scope', + redirect_uri=self.request.redirect_uri, + redirect_fragment=True, + ) + redirect_uri = super().validate_authorization_request() + try: + validate_nonce(self.request, self.exists_nonce, required=True) + except OAuth2Error as error: + error.redirect_uri = redirect_uri + error.redirect_fragment = True + raise error + return redirect_uri + + def validate_consent_request(self): + redirect_uri = self.validate_authorization_request() + validate_request_prompt(self, redirect_uri, redirect_fragment=True) + + def create_authorization_response(self, redirect_uri, grant_user): + state = self.request.state + if grant_user: + params = self.create_granted_params(grant_user) + if state: + params.append(('state', state)) + else: + error = AccessDeniedError(state=state) + params = error.get_body() + + # http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#ResponseModes + response_mode = self.request.data.get('response_mode', self.DEFAULT_RESPONSE_MODE) + return create_response_mode_response( + redirect_uri=redirect_uri, + params=params, + response_mode=response_mode, + ) + + def create_granted_params(self, grant_user): + self.request.user = grant_user + client = self.request.client + token = self.generate_token( + user=grant_user, + scope=self.request.scope, + include_refresh_token=False + ) + if self.request.response_type == 'id_token': + token = { + 'expires_in': token['expires_in'], + 'scope': token['scope'], + } + token = self.process_implicit_token(token) + else: + log.debug('Grant token %r to %r', token, client) + self.server.save_token(token, self.request) + token = self.process_implicit_token(token) + params = [(k, token[k]) for k in token] + return params + + def process_implicit_token(self, token, code=None): + config = self.get_jwt_config() + config['aud'] = self.get_audiences(self.request) + config['nonce'] = self.request.data.get('nonce') + if code is not None: + config['code'] = code + + user_info = self.generate_user_info(self.request.user, token['scope']) + id_token = generate_id_token(token, user_info, **config) + token['id_token'] = id_token + return token diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/util.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/util.py new file mode 100644 index 0000000..3b57dbe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/grants/util.py @@ -0,0 +1,131 @@ +import time +from authlib.oauth2.rfc6749 import InvalidRequestError +from authlib.oauth2.rfc6749 import scope_to_list +from authlib.jose import jwt +from authlib.common.encoding import to_native +from authlib.common.urls import add_params_to_uri, quote_url +from ..util import create_half_hash +from ..errors import ( + 
LoginRequiredError,
+    AccountSelectionRequiredError,
+    ConsentRequiredError,
+)
+
+
+def is_openid_scope(scope):
+    scopes = scope_to_list(scope)
+    return scopes and 'openid' in scopes
+
+
+def validate_request_prompt(grant, redirect_uri, redirect_fragment=False):
+    prompt = grant.request.data.get('prompt')
+    end_user = grant.request.user
+    if not prompt:
+        if not end_user:
+            grant.prompt = 'login'
+        return grant
+
+    if prompt == 'none' and not end_user:
+        raise LoginRequiredError(
+            redirect_uri=redirect_uri,
+            redirect_fragment=redirect_fragment)
+
+    prompts = prompt.split()
+    if 'none' in prompts and len(prompts) > 1:
+        # If this parameter contains none with any other value,
+        # an error is returned
+        raise InvalidRequestError(
+            'Invalid "prompt" parameter.',
+            redirect_uri=redirect_uri,
+            redirect_fragment=redirect_fragment)
+
+    prompt = _guess_prompt_value(
+        end_user, prompts, redirect_uri, redirect_fragment=redirect_fragment)
+    if prompt:
+        grant.prompt = prompt
+    return grant
+
+
+def validate_nonce(request, exists_nonce, required=False):
+    nonce = request.data.get('nonce')
+    if not nonce:
+        if required:
+            raise InvalidRequestError('Missing "nonce" in request.')
+        return True
+
+    if exists_nonce(nonce, request):
+        raise InvalidRequestError('Replay attack')
+
+
+def generate_id_token(
+        token, user_info, key, iss, aud, alg='RS256', exp=3600,
+        nonce=None, auth_time=None, code=None):
+
+    now = int(time.time())
+    if auth_time is None:
+        auth_time = now
+
+    payload = {
+        'iss': iss,
+        'aud': aud,
+        'iat': now,
+        'exp': now + exp,
+        'auth_time': auth_time,
+    }
+    if nonce:
+        payload['nonce'] = nonce
+
+    if code:
+        payload['c_hash'] = to_native(create_half_hash(code, alg))
+
+    access_token = token.get('access_token')
+    if access_token:
+        payload['at_hash'] = to_native(create_half_hash(access_token, alg))
+
+    payload.update(user_info)
+    return to_native(jwt.encode({'alg': alg}, payload, key))
+
+
+def create_response_mode_response(redirect_uri, params, response_mode):
+    if response_mode == 'form_post':
+        tpl = (
+            '<html><head><title>Redirecting</title></head>'
+            '<body onload="javascript:document.forms[0].submit()">'
+            '<form method="post" action="{}">{}</form></body></html>'
+        )
+        inputs = ''.join([
+            '<input type="hidden" name="{}" value="{}"/>'.format(
+                quote_url(k), quote_url(v))
+            for k, v in params
+        ])
+        body = tpl.format(quote_url(redirect_uri), inputs)
+        return 200, body, [('Content-Type', 'text/html; charset=utf-8')]
+
+    if response_mode == 'query':
+        uri = add_params_to_uri(redirect_uri, params, fragment=False)
+    elif response_mode == 'fragment':
+        uri = add_params_to_uri(redirect_uri, params, fragment=True)
+    else:
+        raise InvalidRequestError('Invalid "response_mode" value')
+
+    return 302, '', [('Location', uri)]
+
+
+def _guess_prompt_value(end_user, prompts, redirect_uri, redirect_fragment):
+    # http://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
+
+    if not end_user and 'login' in prompts:
+        return 'login'
+
+    if 'consent' in prompts:
+        if not end_user:
+            raise ConsentRequiredError(
+                redirect_uri=redirect_uri,
+                redirect_fragment=redirect_fragment)
+        return 'consent'
+    elif 'select_account' in prompts:
+        if not end_user:
+            raise AccountSelectionRequiredError(
+                redirect_uri=redirect_uri,
+                redirect_fragment=redirect_fragment)
+        return 'select_account'
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/models.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/models.py
new file mode 100644
index 0000000..5f41405
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/models.py
@@ -0,0 +1,13 @@
+from authlib.oauth2.rfc6749 import (
+    AuthorizationCodeMixin as _AuthorizationCodeMixin
+)
+
+
+class AuthorizationCodeMixin(_AuthorizationCodeMixin):
+    def get_nonce(self):
+        """Get "nonce" value of the authorization code object."""
+        raise NotImplementedError()
+
+    def get_auth_time(self):
+        """Get "auth_time" value of the authorization code object."""
+        raise NotImplementedError()
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/core/util.py b/myenv/lib/python3.12/site-packages/authlib/oidc/core/util.py
new file mode 100644
index 0000000..6df005d
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/core/util.py
@@ -0,0 +1,12 @@
+import hashlib
+from authlib.common.encoding import to_bytes, urlsafe_b64encode
+
+
+def create_half_hash(s, alg):
+    hash_type = f'sha{alg[2:]}'
+    hash_alg = getattr(hashlib, hash_type, None)
+    if not hash_alg:
+        return None
+    data_digest = hash_alg(to_bytes(s)).digest()
+    slice_index = int(len(data_digest) / 2)
+    return urlsafe_b64encode(data_digest[:slice_index])
diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__init__.py b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__init__.py
new file mode 100644
index 0000000..1e76401
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__init__.py
@@ -0,0 +1,13 @@
+"""
+    authlib.oidc.discovery
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    OpenID Connect Discovery 1.0 Implementation.
+ + https://openid.net/specs/openid-connect-discovery-1_0.html +""" + +from .models import OpenIDProviderMetadata +from .well_known import get_well_known_url + +__all__ = ['OpenIDProviderMetadata', 'get_well_known_url'] diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e50aa7a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..f296003 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/well_known.cpython-312.pyc b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/well_known.cpython-312.pyc new file mode 100644 index 0000000..5f2148b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/__pycache__/well_known.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/models.py b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/models.py new file mode 100644 index 0000000..d9329ef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/models.py @@ -0,0 +1,283 @@ +from authlib.oauth2.rfc8414 import AuthorizationServerMetadata +from authlib.oauth2.rfc8414.models import validate_array_value + + +class OpenIDProviderMetadata(AuthorizationServerMetadata): + REGISTRY_KEYS = [ + 'issuer', 'authorization_endpoint', 'token_endpoint', + 'jwks_uri', 'registration_endpoint', 'scopes_supported', + 'response_types_supported', 'response_modes_supported', + 'grant_types_supported', + 'token_endpoint_auth_methods_supported', + 'token_endpoint_auth_signing_alg_values_supported', + 'service_documentation', 'ui_locales_supported', + 'op_policy_uri', 'op_tos_uri', + + # added by OpenID + 'acr_values_supported', 'subject_types_supported', + 'id_token_signing_alg_values_supported', + 'id_token_encryption_alg_values_supported', + 'id_token_encryption_enc_values_supported', + 'userinfo_signing_alg_values_supported', + 'userinfo_encryption_alg_values_supported', + 'userinfo_encryption_enc_values_supported', + 'request_object_signing_alg_values_supported', + 'request_object_encryption_alg_values_supported', + 'request_object_encryption_enc_values_supported', + 'display_values_supported', + 'claim_types_supported', + 'claims_supported', + 'claims_locales_supported', + 'claims_parameter_supported', + 'request_parameter_supported', + 'request_uri_parameter_supported', + 'require_request_uri_registration', + + # not defined by OpenID + # 'revocation_endpoint', + # 'revocation_endpoint_auth_methods_supported', + # 'revocation_endpoint_auth_signing_alg_values_supported', + # 'introspection_endpoint', + # 'introspection_endpoint_auth_methods_supported', + # 'introspection_endpoint_auth_signing_alg_values_supported', + # 'code_challenge_methods_supported', + ] + + def validate_jwks_uri(self): + # REQUIRED in OpenID Connect + jwks_uri = self.get('jwks_uri') + if jwks_uri is None: + raise ValueError('"jwks_uri" is required') + return 
super().validate_jwks_uri() + + def validate_acr_values_supported(self): + """OPTIONAL. JSON array containing a list of the Authentication + Context Class References that this OP supports. + """ + validate_array_value(self, 'acr_values_supported') + + def validate_subject_types_supported(self): + """REQUIRED. JSON array containing a list of the Subject Identifier + types that this OP supports. Valid types include pairwise and public. + """ + # 1. REQUIRED + values = self.get('subject_types_supported') + if values is None: + raise ValueError('"subject_types_supported" is required') + + # 2. JSON array + if not isinstance(values, list): + raise ValueError('"subject_types_supported" MUST be JSON array') + + # 3. Valid types include pairwise and public + valid_types = {'pairwise', 'public'} + if not valid_types.issuperset(set(values)): + raise ValueError( + '"subject_types_supported" contains invalid values') + + def validate_id_token_signing_alg_values_supported(self): + """REQUIRED. JSON array containing a list of the JWS signing + algorithms (alg values) supported by the OP for the ID Token to + encode the Claims in a JWT [JWT]. The algorithm RS256 MUST be + included. The value none MAY be supported, but MUST NOT be used + unless the Response Type used returns no ID Token from the + Authorization Endpoint (such as when using the Authorization + Code Flow). + """ + # 1. REQUIRED + values = self.get('id_token_signing_alg_values_supported') + if values is None: + raise ValueError('"id_token_signing_alg_values_supported" is required') + + # 2. JSON array + if not isinstance(values, list): + raise ValueError('"id_token_signing_alg_values_supported" MUST be JSON array') + + # 3. The algorithm RS256 MUST be included + if 'RS256' not in values: + raise ValueError( + '"RS256" MUST be included in "id_token_signing_alg_values_supported"') + + def validate_id_token_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) supported by the OP for the ID Token to + encode the Claims in a JWT. + """ + validate_array_value(self, 'id_token_encryption_alg_values_supported') + + def validate_id_token_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) supported by the OP for the ID Token to + encode the Claims in a JWT. + """ + validate_array_value(self, 'id_token_encryption_enc_values_supported') + + def validate_userinfo_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms (alg values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. The value none MAY be included. + """ + validate_array_value(self, 'userinfo_signing_alg_values_supported') + + def validate_userinfo_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. + """ + validate_array_value(self, 'userinfo_encryption_alg_values_supported') + + def validate_userinfo_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. + """ + validate_array_value(self, 'userinfo_encryption_enc_values_supported') + + def validate_request_object_signing_alg_values_supported(self): + """OPTIONAL. 
JSON array containing a list of the JWS signing + algorithms (alg values) supported by the OP for Request Objects, + which are described in Section 6.1 of OpenID Connect Core 1.0. + These algorithms are used both when the Request Object is passed + by value (using the request parameter) and when it is passed by + reference (using the request_uri parameter). Servers SHOULD support + none and RS256. + """ + values = self.get('request_object_signing_alg_values_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"request_object_signing_alg_values_supported" MUST be JSON array') + + # Servers SHOULD support none and RS256 + if 'none' not in values or 'RS256' not in values: + raise ValueError( + '"request_object_signing_alg_values_supported" ' + 'SHOULD support none and RS256') + + def validate_request_object_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) supported by the OP for Request Objects. + These algorithms are used both when the Request Object is passed + by value and when it is passed by reference. + """ + validate_array_value(self, 'request_object_encryption_alg_values_supported') + + def validate_request_object_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) supported by the OP for Request Objects. + These algorithms are used both when the Request Object is passed + by value and when it is passed by reference. + """ + validate_array_value(self, 'request_object_encryption_enc_values_supported') + + def validate_display_values_supported(self): + """OPTIONAL. JSON array containing a list of the display parameter + values that the OpenID Provider supports. These values are described + in Section 3.1.2.1 of OpenID Connect Core 1.0. + """ + values = self.get('display_values_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"display_values_supported" MUST be JSON array') + + valid_values = {'page', 'popup', 'touch', 'wap'} + if not valid_values.issuperset(set(values)): + raise ValueError('"display_values_supported" contains invalid values') + + def validate_claim_types_supported(self): + """OPTIONAL. JSON array containing a list of the Claim Types that + the OpenID Provider supports. These Claim Types are described in + Section 5.6 of OpenID Connect Core 1.0. Values defined by this + specification are normal, aggregated, and distributed. If omitted, + the implementation supports only normal Claims. + """ + values = self.get('claim_types_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"claim_types_supported" MUST be JSON array') + + valid_values = {'normal', 'aggregated', 'distributed'} + if not valid_values.issuperset(set(values)): + raise ValueError('"claim_types_supported" contains invalid values') + + def validate_claims_supported(self): + """RECOMMENDED. JSON array containing a list of the Claim Names + of the Claims that the OpenID Provider MAY be able to supply values + for. Note that for privacy or other reasons, this might not be an + exhaustive list. + """ + validate_array_value(self, 'claims_supported') + + def validate_claims_locales_supported(self): + """OPTIONAL. Languages and scripts supported for values in Claims + being returned, represented as a JSON array of BCP47 [RFC5646] + language tag values. Not all languages and scripts are necessarily + supported for all Claim values. 
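+        For example, a provider that only localizes Claims into English and
+        Canadian French might advertise (an illustrative value, not one required
+        by the spec)::
+
+            "claims_locales_supported": ["en", "fr-CA"]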
+ """ + validate_array_value(self, 'claims_locales_supported') + + def validate_claims_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the claims parameter, with true indicating support. If omitted, the + default value is false. + """ + _validate_boolean_value(self, 'claims_parameter_supported') + + def validate_request_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the request parameter, with true indicating support. If omitted, the + default value is false. + """ + _validate_boolean_value(self, 'request_parameter_supported') + + def validate_request_uri_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the request_uri parameter, with true indicating support. If omitted, + the default value is true. + """ + _validate_boolean_value(self, 'request_uri_parameter_supported') + + def validate_require_request_uri_registration(self): + """OPTIONAL. Boolean value specifying whether the OP requires any + request_uri values used to be pre-registered using the request_uris + registration parameter. Pre-registration is REQUIRED when the value + is true. If omitted, the default value is false. + """ + _validate_boolean_value(self, 'require_request_uri_registration') + + @property + def claim_types_supported(self): + # If omitted, the implementation supports only normal Claims + return self.get('claim_types_supported', ['normal']) + + @property + def claims_parameter_supported(self): + # If omitted, the default value is false. + return self.get('claims_parameter_supported', False) + + @property + def request_parameter_supported(self): + # If omitted, the default value is false. + return self.get('request_parameter_supported', False) + + @property + def request_uri_parameter_supported(self): + # If omitted, the default value is true. + return self.get('request_uri_parameter_supported', True) + + @property + def require_request_uri_registration(self): + # If omitted, the default value is false. + return self.get('require_request_uri_registration', False) + + +def _validate_boolean_value(metadata, key): + if key not in metadata: + return + if metadata[key] not in (True, False): + raise ValueError(f'"{key}" MUST be boolean') diff --git a/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/well_known.py b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/well_known.py new file mode 100644 index 0000000..e3087a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/authlib/oidc/discovery/well_known.py @@ -0,0 +1,17 @@ +from authlib.common.urls import urlparse + + +def get_well_known_url(issuer, external=False): + """Get well-known URI with issuer via Section 4.1. 
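+
+    For example, with a hypothetical issuer::
+
+        >>> get_well_known_url('https://example.com/issuer1')
+        '/issuer1/.well-known/openid-configuration'
+        >>> get_well_known_url('https://example.com/issuer1', external=True)
+        'https://example.com/issuer1/.well-known/openid-configuration'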
+ + :param issuer: URL of the issuer + :param external: return full external url or not + :return: URL + """ + # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationRequest + if external: + return issuer.rstrip('/') + '/.well-known/openid-configuration' + + parsed = urlparse.urlparse(issuer) + path = parsed.path + return path.rstrip('/') + '/.well-known/openid-configuration' diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/LICENSE b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/METADATA b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/METADATA new file mode 100644 index 0000000..0a3a772 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/METADATA @@ -0,0 +1,67 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.8.30 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
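The METADATA above shows certifi.where() interactively and from the command line. As a complement, here is a minimal sketch (standard-library ssl and urllib only, with https://example.com as a placeholder host) of wiring the bundled cacert.pem into Python's own TLS stack::

    import ssl
    import urllib.request

    import certifi

    # Build a TLS context whose trust root is the Mozilla-derived bundle
    # that certifi.where() resolves to (the cacert.pem added later in this diff).
    context = ssl.create_default_context(cafile=certifi.where())

    # Reuse that context for a plain stdlib HTTPS request.
    with urllib.request.urlopen("https://example.com", context=context) as resp:
        print(resp.status)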
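Likewise, the authlib discovery pieces added earlier in this diff (get_well_known_url in well_known.py and the validate_* methods in models.py) compose into a short provider-metadata check. A minimal sketch, assuming requests is installed in this environment, that OpenIDProviderMetadata is exported from authlib.oidc.discovery alongside get_well_known_url, and that it inherits a validate() helper which drives the validate_* methods::

    import requests

    from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url

    # external=True yields the absolute URL, e.g.
    # 'https://accounts.google.com/.well-known/openid-configuration'
    url = get_well_known_url("https://accounts.google.com", external=True)

    # The metadata class wraps the parsed JSON document; validate() is
    # assumed to run every validate_* check shown in models.py above.
    metadata = OpenIDProviderMetadata(requests.get(url, timeout=10).json())
    metadata.validate()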
diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/RECORD b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/RECORD new file mode 100644 index 0000000..12374b0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222 +certifi-2024.8.30.dist-info/RECORD,, +certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91 +certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-312.pyc,, +certifi/__pycache__/__main__.cpython-312.pyc,, +certifi/__pycache__/core.cpython-312.pyc,, +certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/WHEEL new file mode 100644 index 0000000..57e56b7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (74.0.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi-2024.8.30.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/myenv/lib/python3.12/site-packages/certifi/__init__.py b/myenv/lib/python3.12/site-packages/certifi/__init__.py new file mode 100644 index 0000000..f61d77f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2024.08.30" diff --git a/myenv/lib/python3.12/site-packages/certifi/__main__.py b/myenv/lib/python3.12/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/myenv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..17fda1c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..f808dd4 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc b/myenv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..50323c2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/certifi/cacert.pem b/myenv/lib/python3.12/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..3c165a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi/cacert.pem @@ -0,0 +1,4929 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 
31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- diff --git a/myenv/lib/python3.12/site-packages/certifi/core.py b/myenv/lib/python3.12/site-packages/certifi/core.py new file mode 100644 index 0000000..91f538b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/certifi/core.py @@ -0,0 +1,114 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
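+            # [Review note, not in upstream certifi: the three lines below are
+            # the whole lazy-extraction pattern described in the comment above --
+            # build the context manager, enter it once to obtain a concrete
+            # filesystem path, and register exit_cacert_ctx() so the context
+            # (and any temporary file it extracted) is closed at interpreter exit.]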
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
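+    # [Illustrative aside added in review -- not part of upstream certifi.
+    # Whichever branch of this module ends up defining where()/contents(),
+    # callers consume them the same way; a minimal sketch using only stdlib APIs:
+    #
+    #     import ssl
+    #     import certifi
+    #
+    #     ctx = ssl.create_default_context(cafile=certifi.where())
+    #     pem_text = certifi.contents()  # the cacert.pem bundle as one str
+    # ]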
+ def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/myenv/lib/python3.12/site-packages/certifi/py.typed b/myenv/lib/python3.12/site-packages/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/LICENSE b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/LICENSE new file mode 100644 index 0000000..29225ee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/LICENSE @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/METADATA b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/METADATA new file mode 100644 index 0000000..60b0779 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/METADATA @@ -0,0 +1,40 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.17.1 +Summary: Foreign Function Interface for Python calling C code. 
+Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Project-URL: Documentation, http://cffi.readthedocs.org/ +Project-URL: Source Code, https://github.com/python-cffi/cffi +Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues +Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html +Project-URL: Downloads, https://github.com/python-cffi/cffi/releases +Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation <http://cffi.readthedocs.org/>`_. + +Contact +------- + +`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_ diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/RECORD b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/RECORD new file mode 100644 index 0000000..23fa52b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/RECORD @@ -0,0 +1,48 @@ +_cffi_backend.cpython-312-x86_64-linux-gnu.so,sha256=-fK60bkCudr6tjAHt4dA3x_CHaOWgVs_Lb2J0JGO3Po,1114632 +cffi-1.17.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-1.17.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 +cffi-1.17.1.dist-info/METADATA,sha256=u6nuvP_qPJKu2zvIbi2zkGzVu7KjnnRIYUFyIrOY3j4,1531 +cffi-1.17.1.dist-info/RECORD,, +cffi-1.17.1.dist-info/WHEEL,sha256=h7F_RlbsFAwUaa98BSEEv6RQhdTqVo2FhuJDzTSKXxc,151 +cffi-1.17.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 +cffi-1.17.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=H6t_ebva6EeHpUuItFLW1gbRp94eZRNJODLaWKdbx1I,513 +cffi/__pycache__/__init__.cpython-312.pyc,, +cffi/__pycache__/_imp_emulation.cpython-312.pyc,, +cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,, +cffi/__pycache__/api.cpython-312.pyc,, +cffi/__pycache__/backend_ctypes.cpython-312.pyc,, +cffi/__pycache__/cffi_opcode.cpython-312.pyc,, +cffi/__pycache__/commontypes.cpython-312.pyc,, +cffi/__pycache__/cparser.cpython-312.pyc,, +cffi/__pycache__/error.cpython-312.pyc,, +cffi/__pycache__/ffiplatform.cpython-312.pyc,, +cffi/__pycache__/lock.cpython-312.pyc,, +cffi/__pycache__/model.cpython-312.pyc,, +cffi/__pycache__/pkgconfig.cpython-312.pyc,, +cffi/__pycache__/recompiler.cpython-312.pyc,, +cffi/__pycache__/setuptools_ext.cpython-312.pyc,, +cffi/__pycache__/vengine_cpy.cpython-312.pyc,, +cffi/__pycache__/vengine_gen.cpython-312.pyc,, +cffi/__pycache__/verifier.cpython-312.pyc,, +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 +cffi/_embedding.h,sha256=EDKw5QrLvQoe3uosXB3H1xPVTYxsn33eV3A43zsA_Fw,18787 +cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960
+cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 +cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 +cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 +cffi/cparser.py,sha256=0qI3mEzZSNVcCangoyXOoAcL-RhpQL08eG8798T024s,44789 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=sim4Tm7lamt2Jn8uzKN0wMYp6ODByk3g7of47-h9LD4,65367 +cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871 +cffi/vengine_cpy.py,sha256=8UagT6ZEOZf6Dju7_CfNulue8CnsHLEzJYhnqUhoF04,43752 +cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 +cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/WHEEL new file mode 100644 index 0000000..0631b9a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (74.1.1) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/entry_points.txt b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/entry_points.txt new file mode 100644 index 0000000..4b0274f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi-1.17.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/myenv/lib/python3.12/site-packages/cffi/__init__.py b/myenv/lib/python3.12/site-packages/cffi/__init__.py new file mode 100644 index 0000000..2e35a38 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.17.1" +__version_info__ = (1, 17, 1) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. 
+__version_verifier_modules__ = "0.8.6" diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6509e9a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc new file mode 100644 index 0000000..910bbf0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc new file mode 100644 index 0000000..0c19f93 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000..42e9da2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc new file mode 100644 index 0000000..24307f0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc new file mode 100644 index 0000000..01ef4c2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc new file mode 100644 index 0000000..c114a2d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc new file mode 100644 index 0000000..17782f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc new file mode 100644 index 0000000..92442e2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc new file mode 100644 index 0000000..233ca56 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc new file mode 100644 
index 0000000..050c2dc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc new file mode 100644 index 0000000..f4a729c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc new file mode 100644 index 0000000..0f4e7ad Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc new file mode 100644 index 0000000..e7c3c5a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc new file mode 100644 index 0000000..539df6c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc new file mode 100644 index 0000000..54c50b9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc new file mode 100644 index 0000000..ec68120 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc new file mode 100644 index 0000000..aed5073 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cffi/_cffi_errors.h b/myenv/lib/python3.12/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000..158e059 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. 
+*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/myenv/lib/python3.12/site-packages/cffi/_cffi_include.h b/myenv/lib/python3.12/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..908a1d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/_cffi_include.h @@ -0,0 +1,389 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. 
+ + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. + + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include <pyconfig.h> + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. */ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include <pyconfig.h> +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include <Python.h> +#ifdef __cplusplus +extern "C" { +#endif +#include <stddef.h> +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4)
&& defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ?
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) 
+ return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/myenv/lib/python3.12/site-packages/cffi/_embedding.h b/myenv/lib/python3.12/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..94d8b30 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/_embedding.h @@ -0,0 +1,550 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. If _cffi_start_python() fails, then this is set + to NULL; otherwise, it should never be NULL. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include <windows.h> +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include <pthread.h> + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway.
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "<init code for '" _CFFI_MODULE_NAME "'>", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.17.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. 
+ */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/myenv/lib/python3.12/site-packages/cffi/_imp_emulation.py b/myenv/lib/python3.12/site-packages/cffi/_imp_emulation.py new file mode 100644 index 0000000..136abdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. + + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/myenv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py b/myenv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 0000000..c3d2312 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,45 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + try: + # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` + from distutils.msvc9compiler import MSVCCompiler + except ImportError: + MSVCCompiler = None +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/myenv/lib/python3.12/site-packages/cffi/api.py b/myenv/lib/python3.12/site-packages/cffi/api.py new file mode 100644 index 0000000..5a474f3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/api.py @@ -0,0 +1,967 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
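+
+        A small illustrative sketch (the struct declared here is invented
+        for the example, not part of the API):
+
+            ffi.cdef("typedef struct { int x, y; } point_t;")
+            p = ffi.new("point_t *", [1, 2])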
+        If 'packed' is specified as True, all structs declared inside this
+        cdef are packed, i.e. laid out without any field alignment at all.
+        Alternatively, 'pack' can be a small integer, and requests for
+        alignment greater than that are ignored (pack=1 is equivalent to
+        packed=True).
+        """
+        self._cdef(csource, override=override, packed=packed, pack=pack)
+
+    def embedding_api(self, csource, packed=False, pack=None):
+        self._cdef(csource, packed=packed, pack=pack, dllexport=True)
+        if self._embedding is None:
+            self._embedding = ''
+
+    def _cdef(self, csource, override=False, **options):
+        if not isinstance(csource, str):    # unicode, on Python 2
+            if not isinstance(csource, basestring):
+                raise TypeError("cdef() argument must be a string")
+            csource = csource.encode('ascii')
+        with self._lock:
+            self._cdef_version = object()
+            self._parser.parse(csource, override=override, **options)
+            self._cdefsources.append(csource)
+            if override:
+                for cache in self._function_caches:
+                    cache.clear()
+            finishlist = self._parser._recomplete
+            if finishlist:
+                self._parser._recomplete = []
+                for tp in finishlist:
+                    tp.finish_backend_type(self, finishlist)
+
+    def dlopen(self, name, flags=0):
+        """Load and return a dynamic library identified by 'name'.
+        The standard C library can be loaded by passing None.
+        Note that functions and types declared by 'ffi.cdef()' are not
+        linked to a particular library, just like C headers; in the
+        library we only look for the actual (untyped) symbols.
+        """
+        if not (isinstance(name, basestring) or
+                name is None or
+                isinstance(name, self.CData)):
+            raise TypeError("dlopen(name): name must be a file name, None, "
+                            "or an already-opened 'void *' handle")
+        with self._lock:
+            lib, function_cache = _make_ffi_library(self, name, flags)
+            self._function_caches.append(function_cache)
+            self._libraries.append(lib)
+        return lib
+
+    def dlclose(self, lib):
+        """Close a library obtained with ffi.dlopen().  After this call,
+        access to functions or variables from the library will fail
+        (possibly with a segmentation fault).
+        """
+        type(lib).__cffi_close__(lib)
+
+    def _typeof_locked(self, cdecl):
+        # call me with the lock!
+        key = cdecl
+        if key in self._parsed_types:
+            return self._parsed_types[key]
+        #
+        if not isinstance(cdecl, str):    # unicode, on Python 2
+            cdecl = cdecl.encode('ascii')
+        #
+        type = self._parser.parse_type(cdecl)
+        really_a_function_type = type.is_raw_function
+        if really_a_function_type:
+            type = type.as_function_pointer()
+        btype = self._get_cached_btype(type)
+        result = btype, really_a_function_type
+        self._parsed_types[key] = result
+        return result
+
+    def _typeof(self, cdecl, consider_function_as_funcptr=False):
+        # string -> ctype object
+        try:
+            result = self._parsed_types[cdecl]
+        except KeyError:
+            with self._lock:
+                result = self._typeof_locked(cdecl)
+        #
+        btype, really_a_function_type = result
+        if really_a_function_type and not consider_function_as_funcptr:
+            raise CDefError("the type %r is a function type, not a "
+                            "pointer-to-function type" % (cdecl,))
+        return btype
+
+    def typeof(self, cdecl):
+        """Parse the C type given as a string and return the
+        corresponding <ctype> object.
+        It can also be used on a 'cdata' instance to get its C type.
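+
+        A short sketch of both forms (values shown are illustrative):
+
+            ffi.typeof("int[10]")     # from a string
+            p = ffi.new("int *")
+            ffi.typeof(p)             # from a cdata instance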
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+        'dest' can be any cdata ptr or array, or a writable Python buffer
+        object.  The size to copy, 'n', is always measured in bytes.
+
+        Unlike other methods, this one supports all Python buffer including
+        byte strings and bytearrays---but it still does not support
+        non-contiguous buffers.
+        """
+        return self._backend.memmove(dest, src, n)
+
+    def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+        """Return a callback object or a decorator making such a
+        callback object.  'cdecl' must name a C function pointer type.
+        The callback invokes the specified 'python_callable' (which may
+        be provided either directly or via a decorator).  Important: the
+        callback object must be manually kept alive for as long as the
+        callback may be invoked from the C level.
+        """
+        def callback_decorator_wrap(python_callable):
+            if not callable(python_callable):
+                raise TypeError("the 'python_callable' argument "
+                                "is not callable")
+            return self._backend.callback(cdecl, python_callable,
+                                          error, onerror)
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
+        if python_callable is None:
+            return callback_decorator_wrap    # decorator mode
+        else:
+            return callback_decorator_wrap(python_callable)  # direct mode
+
+    def getctype(self, cdecl, replace_with=''):
+        """Return a string giving the C type 'cdecl', which may be itself
+        a string or a <ctype> object.  If 'replace_with' is given, it gives
+        extra text to append (or insert for more complicated C types), like
+        a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
+        """
+        if isinstance(cdecl, basestring):
+            cdecl = self._typeof(cdecl)
+        replace_with = replace_with.strip()
+        if (replace_with.startswith('*')
+                and '&[' in self._backend.getcname(cdecl, '&')):
+            replace_with = '(%s)' % replace_with
+        elif replace_with and not replace_with[0] in '[(':
+            replace_with = ' ' + replace_with
+        return self._backend.getcname(cdecl, replace_with)
+
+    def gc(self, cdata, destructor, size=0):
+        """Return a new cdata object that points to the same
+        data.  Later, when this new cdata object is garbage-collected,
+        'destructor(old_cdata_object)' will be called.
+
+        The optional 'size' gives an estimate of the size, used to
+        trigger the garbage collection more eagerly.  So far only used
+        on PyPy.  It tells the GC that the returned object keeps alive
+        roughly 'size' bytes of external memory.
+        """
+        return self._backend.gcp(cdata, destructor, size)
+
+    def _get_cached_btype(self, type):
+        assert self._lock.acquire(False) is False
+        # call me with the lock!
+        try:
+            BType = self._cached_btypes[type]
+        except KeyError:
+            finishlist = []
+            BType = type.get_cached_btype(self, finishlist)
+            for type in finishlist:
+                type.finish_backend_type(self, finishlist)
+        return BType
+
+    def verify(self, source='', tmpdir=None, **kwargs):
+        """Verify that the current ffi signatures compile on this
+        machine, and return a dynamic library object.  The dynamic
+        library can be used to call functions and access global
+        variables declared in this 'ffi'.  The library is compiled
+        by the C compiler: it gives you C-level API compatibility
+        (including calling macros).  This is unlike 'ffi.dlopen()',
+        which requires binary compatibility in the signatures.
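+
+        A classic minimal sketch (requires a C compiler at run-time; the
+        'libraries' keyword is passed through to the compiler):
+
+            ffi.cdef("double sin(double x);")
+            lib = ffi.verify('#include <math.h>', libraries=['m'])
+            assert lib.sin(0.0) == 0.0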
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig
+        if not isinstance(pkgconfig_libs, list):
+            raise TypeError("the pkgconfig_libs argument must be a list "
+                            "of package names")
+        kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+        pkgconfig.merge_flags(kwds, kwds2)
+        self.set_source(module_name, source, source_extension, **kwds)
+
+    def distutils_extension(self, tmpdir='build', verbose=True):
+        from cffi._shimmed_dist_utils import mkpath
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
+                return self.verifier.get_extension()
+            raise ValueError("set_source() must be called before"
+                             " distutils_extension()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is None:
+            raise TypeError("distutils_extension() is only for C extension "
+                            "modules, not for dlopen()-style pure Python "
+                            "modules")
+        mkpath(tmpdir)
+        ext, updated = recompile(self, module_name,
+                                 source, tmpdir=tmpdir, extradir=tmpdir,
+                                 source_extension=source_extension,
+                                 call_c_compiler=False, **kwds)
+        if verbose:
+            if updated:
+                sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
+            else:
+                sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
+        return ext
+
+    def emit_c_code(self, filename):
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before emit_c_code()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is None:
+            raise TypeError("emit_c_code() is only for C extension modules, "
+                            "not for dlopen()-style pure Python modules")
+        recompile(self, module_name, source,
+                  c_file=filename, call_c_compiler=False,
+                  uses_ffiplatform=False, **kwds)
+
+    def emit_python_code(self, filename):
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before"
+                             " emit_python_code()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        if source is not None:
+            raise TypeError("emit_python_code() is only for dlopen()-style "
+                            "pure Python modules, not for C extension modules")
+        recompile(self, module_name, source,
+                  c_file=filename, call_c_compiler=False,
+                  uses_ffiplatform=False, **kwds)
+
+    def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
+        """The 'target' argument gives the final file name of the
+        compiled DLL.  Use '*' to force distutils' choice, suitable for
+        regular CPython C API modules.  Use a file name ending in '.*'
+        to ask for the system's default extension for dynamic libraries
+        (.so/.dll/.dylib).
+
+        The default is '*' when building a non-embedded C API extension,
+        and (module_name + '.*') when building an embedded library.
+        """
+        from .recompiler import recompile
+        #
+        if not hasattr(self, '_assigned_source'):
+            raise ValueError("set_source() must be called before compile()")
+        module_name, source, source_extension, kwds = self._assigned_source
+        return recompile(self, module_name, source, tmpdir=tmpdir,
+                         target=target, source_extension=source_extension,
+                         compiler_verbose=verbose, debug=debug, **kwds)
+
+    def init_once(self, func, tag):
+        # Read _init_once_cache[tag], which is either (False, lock) if
+        # we're calling the function now in some thread, or (True, result).
+        # Don't call setdefault() in most cases, to avoid allocating and
+        # immediately freeing a lock; but still use setdefault() to avoid
+        # races.
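+        #
+        # Usage sketch (the tag string and the initializer below are
+        # illustrative, not part of this method's contract):
+        #
+        #     def _setup():
+        #         return make_expensive_state()
+        #     state = ffi.init_once(_setup, "my-module-setup")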
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str):    # unicode, on Python 2
+                libname = libname.encode('utf-8')
+            FFILibrary.__name__ = 'FFILibrary_%s' % libname
+        except UnicodeError:
+            pass
+    library = FFILibrary()
+    return library, library.__dict__
+
+def _builtin_function_type(func):
+    # a hack to make at least ffi.typeof(builtin_function) work,
+    # if the builtin function was obtained by 'vengine_cpy'.
+    import sys
+    try:
+        module = sys.modules[func.__module__]
+        ffi = module._cffi_original_ffi
+        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+        tp = types_of_builtin_funcs[func]
+    except (KeyError, AttributeError, TypeError):
+        return None
+    else:
+        with ffi._lock:
+            return ffi._get_cached_btype(tp)
diff --git a/myenv/lib/python3.12/site-packages/cffi/backend_ctypes.py b/myenv/lib/python3.12/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 0000000..e7956a7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+    bytechr = chr
+else:
+    unicode = str
+    long = int
+    xrange = range
+    bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+    pass
+
+class CTypesData(object):
+    __metaclass__ = CTypesType
+    __slots__ = ['__weakref__']
+    __name__ = '<cdata>'
+
+    def __init__(self, *args):
+        raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+    @classmethod
+    def _newp(cls, init):
+        raise TypeError("expected a pointer or array ctype, got '%s'"
+                        % (cls._get_c_name(),))
+
+    @staticmethod
+    def _to_ctypes(value):
+        raise TypeError
+
+    @classmethod
+    def _arg_to_ctypes(cls, *value):
+        try:
+            ctype = cls._ctype
+        except AttributeError:
+            raise TypeError("cannot create an instance of %r" % (cls,))
+        if value:
+            res = cls._to_ctypes(*value)
+            if not isinstance(res, ctype):
+                res = cls._ctype(res)
+        else:
+            res = cls._ctype()
+        return res
+
+    @classmethod
+    def _create_ctype_obj(cls, init):
+        if init is None:
+            return cls._arg_to_ctypes()
+        else:
+            return cls._arg_to_ctypes(init)
+
+    @staticmethod
+    def _from_ctypes(ctypes_value):
+        raise TypeError
+
+    @classmethod
+    def _get_c_name(cls, replace_with=''):
+        return cls._reftypename.replace(' &', replace_with)
+
+    @classmethod
+    def _fix_class(cls):
+        cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__module__ = 'ffi'
+
+    def _get_own_repr(self):
+        raise NotImplementedError
+
+    def _addr_repr(self, address):
+        if address == 0:
+            return 'NULL'
+        else:
+            if address < 0:
+                address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+            return '0x%x' % address
+
+    def __repr__(self, c_name=None):
+        own = self._get_own_repr()
+        return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+    def _convert_to_address(self, BClass):
+        if BClass is None:
+            raise TypeError("cannot convert %r to an address" % (
+                self._get_c_name(),))
+        else:
+            raise TypeError("cannot convert %r to %r" % (
+                self._get_c_name(), BClass._get_c_name()))
+
+    @classmethod
+    def _get_size(cls):
+        return ctypes.sizeof(cls._ctype)
+
+    def _get_size_of_instance(self):
+        return ctypes.sizeof(self._ctype)
+
+    @classmethod
+    def _cast_from(cls, source):
+        raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+    def _cast_to_integer(self):
+        return self._convert_to_address(None)
+
+    @classmethod
+    def _alignment(cls):
+        return ctypes.alignment(cls._ctype)
+
+    def __iter__(self):
+        raise TypeError("cdata %r does not support iteration" % (
+            self._get_c_name()),)
+
+    def _make_cmp(name):
+        cmpfunc =
getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # <CData <char>> + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
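# (ctypes silently truncates a value that does not fit in a bitfield; + # the write/read-back/compare sequence below restores the old value + # and raises OverflowError instead of corrupting the field.) +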
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on an object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a <cdata 'struct-or-union'>") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/myenv/lib/python3.12/site-packages/cffi/cffi_opcode.py b/myenv/lib/python3.12/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..6421df6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, + '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/myenv/lib/python3.12/site-packages/cffi/commontypes.py b/myenv/lib/python3.12/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..d4dae35 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/commontypes.py @@ -0,0 +1,82 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above +COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' +COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/myenv/lib/python3.12/site-packages/cffi/cparser.py b/myenv/lib/python3.12/site-packages/cffi/cparser.py new file mode 100644 index 0000000..eee83ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/cparser.py @@ -0,0 +1,1015 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
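+ # (The two imports below are deliberately unreachable at runtime: merely + # naming pycparser's generated table modules here is enough for static + # dependency scanners such as cx_Freeze to bundle them.)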
+ import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "<cdef source string>" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed)") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only works because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. 
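+ # For example, the vararg declaration 'int printf(const char *, ...);' + # becomes 'int printf(const char *, __dotdotdot__ );', which pycparser + # then accepts as an ordinary parameter of that typedef'ed type.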
+ csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 "<cdef automatic initialization code>"') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called <cdef source string> from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + csourcelines.append('') # see test_missing_newline_bug + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for "<cdef source string>:NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
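+ # For example, a message like '<cdef source string>:2:10: before: bar' + # yields linenum == 2, and we return the second line of csource.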
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + # skip pragma, only in pycparser 2.15 + import warnings + warnings.warn( + "#pragma in cdef() are entirely ignored. " + "They should be removed for now, otherwise your " + "code might behave differently in a future version " + "of CFFI if #pragma support gets added. 
Note that " + "'#pragma pack' needs to be replaced with the " + "'packed' keyword argument to cdef().") + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # 
"void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. 
This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with 
only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we abuse them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? 
If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
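
The BinaryOp folding handled just below leans on the _c_div() helper, because C integer division truncates toward zero while Python's // operator floors. A minimal standalone sketch of the same adjustment (plain Python, independent of the classes in this file):

    def c_div(a, b):
        # Python's // floors; C truncates toward zero, so step the
        # result back toward zero when the signs differ and there is
        # a remainder.
        result = a // b
        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
            result += 1
        return result

    assert -7 // 2 == -4          # Python floor division
    assert c_div(-7, 2) == -3     # C semantics: truncate toward zero
    assert c_div(7, -2) == -3
    assert c_div(-7, -2) == 3     # same sign: no adjustment needed

This is also why the '%' case below computes left - _c_div(left, right) * right rather than using Python's % directly: the C remainder takes the sign of the dividend.
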
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef'
+                       % decl.coord.line)
diff --git a/myenv/lib/python3.12/site-packages/cffi/error.py b/myenv/lib/python3.12/site-packages/cffi/error.py
new file mode 100644
index 0000000..0a27247
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cffi/error.py
@@ -0,0 +1,31 @@
+
+class FFIError(Exception):
+    __module__ = 'cffi'
+
+class CDefError(Exception):
+    __module__ = 'cffi'
+    def __str__(self):
+        try:
+            current_decl = self.args[1]
+            filename = current_decl.coord.file
+            linenum = current_decl.coord.line
+            prefix = '%s:%d: ' % (filename, linenum)
+        except (AttributeError, TypeError, IndexError):
+            prefix = ''
+        return '%s%s' % (prefix, self.args[0])
+
+class VerificationError(Exception):
+    """ An error raised when verification fails
+    """
+    __module__ = 'cffi'
+
+class VerificationMissing(Exception):
+    """ An error raised when incomplete structures are passed into
+        cdef, but no verification has been done
+    """
+    __module__ = 'cffi'
+
+class PkgConfigError(Exception):
+    """ An error raised for missing modules in pkg-config
+    """
+    __module__ = 'cffi'
diff --git a/myenv/lib/python3.12/site-packages/cffi/ffiplatform.py b/myenv/lib/python3.12/site-packages/cffi/ffiplatform.py
new file mode 100644
index 0000000..adca28f
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cffi/ffiplatform.py
@@ -0,0 +1,113 @@
+import sys, os
+from .error import VerificationError
+
+
+LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
+                      'extra_objects', 'depends']
+
+def get_extension(srcfilename, modname, sources=(), **kwds):
+    from cffi._shimmed_dist_utils import Extension
+    allsources = [srcfilename]
+    for src in sources:
+        allsources.append(os.path.normpath(src))
+    return Extension(name=modname, sources=allsources, **kwds)
+
+def compile(tmpdir, ext, compiler_verbose=0, debug=None):
+    """Compile a C extension module using distutils."""
+
+    saved_environ = os.environ.copy()
+    try:
+        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
+        outputfilename = os.path.abspath(outputfilename)
+    finally:
+        # workaround for a distutils bug where some env vars can
+        # become longer and longer every time it is used
+        for key, value in saved_environ.items():
+            if os.environ.get(key) != value:
+                os.environ[key] = value
+    return outputfilename
+
+def _build(tmpdir, ext, compiler_verbose=0, debug=None):
+    # XXX compact but horrible :-(
+    from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity
+
+    dist = Distribution({'ext_modules': [ext]})
+    dist.parse_config_files()
+    options = dist.get_option_dict('build_ext')
+    if debug is None:
+        debug = sys.flags.debug
+    options['debug'] = ('ffiplatform', debug)
+    options['force'] = ('ffiplatform', True)
+    options['build_lib'] = ('ffiplatform', tmpdir)
+    options['build_temp'] = ('ffiplatform', tmpdir)
+    #
+    try:
+        old_level = set_threshold(0) or 0
+        try:
+            set_verbosity(compiler_verbose)
+            dist.run_command('build_ext')
+            cmd_obj = dist.get_command_obj('build_ext')
+            [soname] = cmd_obj.get_outputs()
+        finally:
+            set_threshold(old_level)
+    except (CompileError, LinkError) as e:
+        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
+    #
+    return soname
+
+try:
+    from os.path import samefile
+except ImportError:
+    def samefile(f1, f2):
+        return os.path.abspath(f1) == os.path.abspath(f2)
+
+def maybe_relative_path(path):
+    if not os.path.isabs(path):
+        return path      # already relative
+    dir = path
+    names = []
+    while True:
+        prevdir = dir
+        dir, name = os.path.split(prevdir)
+        if dir == prevdir or not dir:
+ return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/myenv/lib/python3.12/site-packages/cffi/lock.py b/myenv/lib/python3.12/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/myenv/lib/python3.12/site-packages/cffi/model.py b/myenv/lib/python3.12/site-packages/cffi/model.py new file mode 100644 index 0000000..e5f4cae --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_cffi_float_complex_t': 'j', + '_cffi_double_complex_t': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + 
_attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + 
brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
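
A note on the declarator machinery used by ArrayType and PointerType above: every type object stores a c_name_with_marker containing exactly one '&' at the point where a variable name may be spliced into the C declaration, and get_c_name() performs the substitution, adding parentheses where C declarator precedence demands it. A small illustration of the ArrayType case (a sketch that assumes cffi is installed, so its model module can be imported):

    from cffi import model

    five_ints = model.ArrayType(model.PrimitiveType('int'), 5)
    assert five_ints.c_name_with_marker == 'int&[5]'
    # a plain name is spliced in directly...
    assert five_ints.get_c_name('x') == 'int x[5]'
    # ...while a '*name' is parenthesized to keep the precedence right
    assert five_ints.get_c_name('*p') == 'int(*p)[5]'
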
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
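
The range test a few lines below picks the first candidate type whose span covers every enumerator value; the (8*size-1) and (8*size-sign) shifts compute the signed lower bound and an upper bound that gains one extra bit for unsigned candidates. A worked sketch of that same test (the 4-byte size for int used here is an assumption, typical but platform-dependent):

    def fits(smallest, largest, size, sign):
        # mirrors the comparison used below
        return (smallest >= ((-1) << (8 * size - 1)) and
                largest < (1 << (8 * size - sign)))

    assert fits(-1, 300, size=4, sign=1)         # fits a signed int
    assert not fits(-1, 2**31, size=4, sign=1)   # too large for int
    assert fits(0, 2**32 - 1, size=4, sign=0)    # fits an unsigned int
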
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/myenv/lib/python3.12/site-packages/cffi/parse_c_type.h b/myenv/lib/python3.12/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..84e4ef8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
+#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
+#define _CFFI_F_OPAQUE 0x10 // opaque
+
+struct _cffi_field_s {
+    const char *name;
+    size_t field_offset;
+    size_t field_size;
+    _cffi_opcode_t field_type_op;
+};
+
+struct _cffi_enum_s {
+    const char *name;
+    int type_index;          // -> _cffi_types, on a OP_ENUM
+    int type_prim;           // _CFFI_PRIM_xxx
+    const char *enumerators; // comma-delimited string
+};
+
+struct _cffi_typename_s {
+    const char *name;
+    int type_index;   /* if opaque, points to a possibly artificial
+                         OP_STRUCT which is itself opaque */
+};
+
+struct _cffi_type_context_s {
+    _cffi_opcode_t *types;
+    const struct _cffi_global_s *globals;
+    const struct _cffi_field_s *fields;
+    const struct _cffi_struct_union_s *struct_unions;
+    const struct _cffi_enum_s *enums;
+    const struct _cffi_typename_s *typenames;
+    int num_globals;
+    int num_struct_unions;
+    int num_enums;
+    int num_typenames;
+    const char *const *includes;
+    int num_types;
+    int flags;      /* future extension */
+};
+
+struct _cffi_parse_info_s {
+    const struct _cffi_type_context_s *ctx;
+    _cffi_opcode_t *output;
+    unsigned int output_size;
+    size_t error_location;
+    const char *error_message;
+};
+
+struct _cffi_externpy_s {
+    const char *name;
+    size_t size_of_result;
+    void *reserved1, *reserved2;
+};
+
+#ifdef _CFFI_INTERNAL
+static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
+static int search_in_globals(const struct _cffi_type_context_s *ctx,
+                             const char *search, size_t search_len);
+static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
+                                   const char *search, size_t search_len);
+#endif
diff --git a/myenv/lib/python3.12/site-packages/cffi/pkgconfig.py b/myenv/lib/python3.12/site-packages/cffi/pkgconfig.py
new file mode 100644
index 0000000..5c93f15
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cffi/pkgconfig.py
@@ -0,0 +1,121 @@
+# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
+import sys, os, subprocess
+
+from .error import PkgConfigError
+
+
+def merge_flags(cfg1, cfg2):
+    """Merge values from cffi config flags cfg2 into cfg1
+
+    Example:
+        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
+        {"libraries": ["one", "two"]}
+    """
+    for key, value in cfg2.items():
+        if key not in cfg1:
+            cfg1[key] = value
+        else:
+            if not isinstance(cfg1[key], list):
+                raise TypeError("cfg1[%r] should be a list of strings" % (key,))
+            if not isinstance(value, list):
+                raise TypeError("cfg2[%r] should be a list of strings" % (key,))
+            cfg1[key].extend(value)
+    return cfg1
+
+
+def call(libname, flag, encoding=sys.getfilesystemencoding()):
+    """Calls pkg-config and returns the output if found
+    """
+    a = ["pkg-config", "--print-errors"]
+    a.append(flag)
+    a.append(libname)
+    try:
+        pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    except EnvironmentError as e:
+        raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
+
+    bout, berr = pc.communicate()
+    if pc.returncode != 0:
+        try:
+            berr = berr.decode(encoding)
+        except Exception:
+            pass
+        raise PkgConfigError(berr.strip())
+
+    if sys.version_info >= (3,) and not isinstance(bout, str):   # Python 3.x
+        try:
+            bout = bout.decode(encoding)
+        except UnicodeDecodeError:
+            raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
+                                 "be decoded with encoding %r:\n%r" %
+                                 (flag, libname, encoding, bout))
+
+    if os.altsep != '\\' and '\\' in bout:
+        raise
PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/myenv/lib/python3.12/site-packages/cffi/recompiler.py b/myenv/lib/python3.12/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..57781a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/recompiler.py @@ -0,0 +1,1598 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# 
____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
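
The flag computed on the next line controls what enumfields() yields for anonymous inner structs and unions. A short sketch of the difference, built directly on the model classes (it assumes cffi is importable; the '$1' placeholder name mimics what the parser invents for anonymous structs):

    from cffi import model

    inner = model.StructType('$1', ('a',),
                             (model.PrimitiveType('int'),), (-1,))
    outer = model.StructType('s', ('', 'b'),
                             (inner, model.PrimitiveType('int')), (-1, -1))
    # expanded, as when emitting C: the inner fields are inlined so
    # their offsets can be checked
    assert [f[0] for f in outer.enumfields()] == ['a', 'b']
    # unexpanded, as when emitting Python: the anonymous member keeps
    # its empty name and is resolved at import time
    assert [f[0] for f in outer.enumfields(False)] == ['', 'b']
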
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-)
+        return self._typesdict[type]
+
+    def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
+        extraarg = ''
+        if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
+            if tp.is_integer_type() and tp.name != '_Bool':
+                converter = '_cffi_to_c_int'
+                extraarg = ', %s' % tp.name
+            elif isinstance(tp, model.UnknownFloatType):
+                # don't check with is_float_type(): it may be a 'long
+                # double' here, and _cffi_to_c_double would lose precision
+                converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
+            else:
+                cname = tp.get_c_name('')
+                converter = '(%s)_cffi_to_c_%s' % (cname,
+                                                   tp.name.replace(' ', '_'))
+                if cname in ('char16_t', 'char32_t'):
+                    self.needs_version(VERSION_CHAR16CHAR32)
+            errvalue = '-1'
+        #
+        elif isinstance(tp, model.PointerType):
+            self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
+                                                    tovar, errcode)
+            return
+        #
+        elif (isinstance(tp, model.StructOrUnionOrEnum) or
+              isinstance(tp, model.BasePrimitiveType)):
+            # a struct (not a struct pointer) as a function argument;
+            # or, a complex (the same code works)
+            self._prnt('  if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
+                       % (tovar, self._gettypenum(tp), fromvar))
+            self._prnt('    %s;' % errcode)
+            return
+        #
+        elif isinstance(tp, model.FunctionPtrType):
+            converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
+            extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
+            errvalue = 'NULL'
+        #
+        else:
+            raise NotImplementedError(tp)
+        #
+        self._prnt('  %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
+        self._prnt('  if (%s == (%s)%s && PyErr_Occurred())' % (
+            tovar, tp.get_c_name(''), errvalue))
+        self._prnt('    %s;' % errcode)
+
+    def _extra_local_variables(self, tp, localvars, freelines):
+        if isinstance(tp, model.PointerType):
+            localvars.add('Py_ssize_t datasize')
+            localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+            freelines.add('if (large_args_free != NULL)'
+                          ' _cffi_free_array_arguments(large_args_free);')
+
+    def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
+        self._prnt('  datasize = _cffi_prepare_pointer_call_argument(')
+        self._prnt('      _cffi_type(%d), %s, (char **)&%s);' % (
+            self._gettypenum(tp), fromvar, tovar))
+        self._prnt('  if (datasize != 0) {')
+        self._prnt('    %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
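+ # --- illustrative sketch (added commentary; hypothetical cdef) ---
+ # For a declaration such as "struct pt f(struct pt a);" the branch
+ # below rewrites the wrapper so that the struct argument and the
+ # struct result both travel through pointers, roughly:
+ #
+ #     static void _cffi_f_f(struct pt *result, struct pt *x0)
+ #     {
+ #       { *result = f(*x0); }
+ #     }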
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
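+ # Illustrative note (added commentary, not upstream cffi): for a
+ # hypothetical field "int n;" the check emitted below is
+ #     { int *tmp = &p->n; (void)tmp; }
+ # which triggers a compile-time warning or error whenever the real
+ # type of the field differs from the declared one.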
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)4096)->%s) - (char *)4096' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but 
some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
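+ # Illustrative sketch (hypothetical constant names, added for
+ # exposition): _generate_cpy_const() emits one tiny C accessor per
+ # constant. An integer constant FOO becomes roughly
+ #     static int _cffi_const_FOO(unsigned long long *o)
+ # which stores the value in *o and returns flag bits (bit 0: the
+ # value is <= 0; bit 1: a supplied check_value did not match),
+ # while a non-integer constant BAR of type double becomes
+ #     static void _cffi_const_BAR(char *o) { *(double *)o = BAR; }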
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _is_file_like(maybefile): + # compare to xml.etree.ElementTree._get_writer + return hasattr(maybefile, 'write') + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + if _is_file_like(target_file): + recompiler.write_source_to_f(target_file, preamble) + return True + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. 
Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, + # since the toolchain it supports (VS2005-2008) is also long dead. + from cffi._shimmed_dist_utils import MSVCCompiler + if MSVCCompiler is not None: + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, + uses_ffiplatform=True, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + if call_c_compiler and _is_file_like(c_file): + raise TypeError("Writing to file-like objects is not supported " + "with call_c_compiler=True") + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + if uses_ffiplatform: + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + else: + ext = None + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + 
return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/myenv/lib/python3.12/site-packages/cffi/setuptools_ext.py b/myenv/lib/python3.12/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..681b49d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,216 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. 
So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import log, mkpath + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generated .py module in this case. 
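+ # Illustrative note (hypothetical names, added for exposition): if
+ # py_modules is ['pkg.helper', 'pkg._example_cffi'] and module_name
+ # is 'pkg._example_cffi', the sdist file list below is computed with
+ # py_modules temporarily reduced to ['pkg.helper'], so the generated
+ # module is never shipped as a source file.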
+ saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/myenv/lib/python3.12/site-packages/cffi/vengine_cpy.py b/myenv/lib/python3.12/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..eb0b6f7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1084 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys +from . import model +from .error import VerificationError +from . import _imp_emulation as imp + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. 
The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on whether we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the <ctype> objects. This is needed because we + # need the values of the enum constants in order to build the + # <ctype 'enum'> that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contain + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the <ctype> objects. Collect them in + # order in a list. 
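+ # Illustrative note (added commentary, not upstream cffi): the next
+ # lines invert self._typesdict, which maps types to indices; e.g. a
+ # hypothetical {int_type: 0, charp_type: 1} becomes {0: int_type,
+ # 1: charp_type}, so 'lst' lines the types up in exactly the order
+ # the generated C module numbered them.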
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as <cdata> if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. + self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif tp.is_complex_type(): + raise VerificationError( + "not implemented in verify(): complex types") + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, 
tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # 
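+ # Illustrative note (added commentary, not upstream cffi): via the
+ # helpers above, pointer arguments declare "Py_ssize_t datasize" and
+ # a "large_args_free" chain; arguments of at most 640 bytes are
+ # copied into alloca() stack space, larger ones into heap buffers
+ # that _cffi_free_array_arguments() releases after the call.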
+ localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in 
tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 
'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => 
"___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include <Python.h> +#include <stddef.h> + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? 
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + 
__attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/myenv/lib/python3.12/site-packages/cffi/vengine_gen.py b/myenv/lib/python3.12/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..bffc821 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/vengine_gen.py @@ -0,0 +1,679 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
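+ # (each declaration key has the form 'function foo', 'struct bar', etc., + # so the 'decl' step dispatches to methods such as + # _generate_gen_function_decl(tp, 'foo') -- see _generate() below)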
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
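+ # (for example, "typedef struct { int x, y; } point_t;" declares an + # anonymous struct that is only reachable through the typedef name + # 'point_t')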
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a <cdata 'type (*)[N]'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr=<cdata 'int *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include <stdio.h> +#include <stddef.h> +#include <stdint.h> +#include <stdarg.h> +#include <errno.h> +#include <sys/types.h> /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif +''' diff --git a/myenv/lib/python3.12/site-packages/cffi/verifier.py b/myenv/lib/python3.12/site-packages/cffi/verifier.py new file mode 100644 index 0000000..e392a2b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/METADATA b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/METADATA new file mode 100644 index 0000000..1173bde --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/METADATA @@ -0,0 +1,138 @@ +Metadata-Version: 2.3 +Name: cryptography +Version: 43.0.1 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Dist: cffi >=1.12 ; platform_python_implementation != 'PyPy' +Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh' +Requires-Dist: nox ; extra == 'nox' +Requires-Dist: cryptography-vectors ==43.0.1 ; extra == 'test' +Requires-Dist: pytest >=6.2.0 ; extra == 'test' +Requires-Dist: pytest-benchmark ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: pytest-xdist ; extra == 'test' +Requires-Dist: pretend ; extra == 'test' +Requires-Dist: certifi ; extra == 'test' +Requires-Dist: pytest-randomly ; extra == 'test-randomorder' +Requires-Dist: sphinx >=5.3.0 ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme >=1.1.1 ; extra == 'docs' +Requires-Dist: pyenchant >=1.6.11 ; extra == 'docstest' +Requires-Dist: readme-renderer ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling >=4.0.1 ; extra == 'docstest' +Requires-Dist: build ; extra == 'sdist' +Requires-Dist: ruff ; extra == 'pep8test' +Requires-Dist: mypy ; extra == 'pep8test' +Requires-Dist: check-sdist ; extra == 'pep8test' +Requires-Dist: click ; extra == 'pep8test' +Provides-Extra: ssh +Provides-Extra: nox +Provides-Extra: test +Provides-Extra: test-randomorder +Provides-Extra: docs +Provides-Extra: docstest +Provides-Extra: sdist +Provides-Extra: pep8test +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python 
developers. +Author: The cryptography developers +Author-email: The Python Cryptographic Authority and individual contributors +License: Apache-2.0 OR BSD-3-Clause +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst; charset=UTF-8 +Project-URL: homepage, https://github.com/pyca/cryptography +Project-URL: documentation, https://cryptography.io/ +Project-URL: source, https://github.com/pyca/cryptography/ +Project-URL: issues, https://github.com/pyca/cryptography/issues +Project-URL: changelog, https://cryptography.io/en/latest/changelog/ + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.7+ and PyPy3 7.3.11+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +.. 
_`security reporting`: https://cryptography.io/en/latest/security/ + diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/RECORD b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/RECORD new file mode 100644 index 0000000..d44495b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/RECORD @@ -0,0 +1,173 @@ +cryptography-43.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-43.0.1.dist-info/METADATA,sha256=AGSptf0qwYYF5RLvcScxitnPJZ6URUiMFp8jelkGAuE,5440 +cryptography-43.0.1.dist-info/RECORD,, +cryptography-43.0.1.dist-info/WHEEL,sha256=5SNCVD9cb88a-xAIrDHIo1CvpgNriOYcNgb4b8rPcOw,107 +cryptography-43.0.1.dist-info/license_files/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 +cryptography-43.0.1.dist-info/license_files/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-43.0.1.dist-info/license_files/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography/__about__.py,sha256=pY_pmYXjJTK-LjfCu7ot0NMj0QC2dkD1dCPyV8QjISM,445 +cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 +cryptography/__pycache__/__about__.cpython-312.pyc,, +cryptography/__pycache__/__init__.cpython-312.pyc,, +cryptography/__pycache__/exceptions.cpython-312.pyc,, +cryptography/__pycache__/fernet.cpython-312.pyc,, +cryptography/__pycache__/utils.cpython-312.pyc,, +cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 +cryptography/fernet.py,sha256=aPj82w-Z_1GBXUtWRUsZdVbMwRo5Mbjj0wkA9wG4rkw,6696 +cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 +cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,, +cryptography/hazmat/_oid.py,sha256=e9yLmxtdQtuL94ztQv3SGtt_ea1Mx6aUwGftJsP6EXk,15201 +cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 +cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/backend.py,sha256=pUXUbugLwMm2Gls-h5U5fw2RvepaNjEvnao6CTmL1xQ,9648 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/_rust.abi3.so,sha256=vpsvBh7aOALJ8gTHDGvx57yr72aQRfzxr6cZBU9518E,10837832 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=wb1OT76lG19vjq97_q2MM3qdJlQhyloXfVbKFDmRse4,737 +cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 +cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 +cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=R-xJ-XmJZ1lOk-fWHHvRnP3QNTCFnKv-l3xlNWfLVt4,868 +cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=Lvn250QMdPyeF-hoBF6rkQgHLBJxVauXCb8i8uYTomQ,1368 +cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=i0gA3jUQ4rkJXTGGZrq-AuY-VQLN31lyDeWuDZ0zJYw,2553 
+cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=iK0ZhQ-WyCQbjaraaFgK6q4PpD-7Rf5RDHkFD3YEW_g,1301 +cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 +cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 +cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 +cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 +cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=OJsrblS2nHptZctva-pAKFL5q8yPEAkhmjPZpJ6TA94,493 +cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=SkPHK2HdbYN02TVQEUOgW3iTdiEY7HBE4DijpdkAzmk,475 +cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573 +cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662 +cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544 +cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=JSrlGNaW49ZCZ1hcb-YJdS1EAbsMwRbVEcLL0P9OApA,872 +cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540 +cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 +cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=2BKdbrddM_9SMUpdvHKGhb9MNjURCarPxccbUDzHeoA,484 +cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=AoRMWNvCJTiH5L-lkIkCdPlrPLUdJvvfXpIvf1GmxpM,466 +cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=afhB_6M8xI1MIE5vxkaDF1jSxA48ib1--NiOxtf6boM,1394 +cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=QCmuA0IgDr4iOecUOXgUUeh3BAjJx8ubjz__EnNbyGY,972 +cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=Xo1Gd7bh9rU4HuIS4pm9UwCY6IS1gInvFwmhABLOVO4,936 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=WLrGmqmFss8dXKhlG_J9nVhoCcodR72xJdCoxEuBtjY,3551 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=dkGKGU-22uR2ZKeOOwaSxEJCGaafgUjb2romWcu03QE,5163 +cryptography/hazmat/bindings/openssl/binding.py,sha256=e1gnFAZBPrkJ3CsiZV-ug6kaPdNTAEROaUFiFrUh71M,4042 +cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=HWA4PKDS2w4D2dQoRerpLRU7Kntt5vJeJC7j--AlZVU,2520 +cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,, 
+cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=gKa0WrLz6K4fqhnGbfBYKDSxgLxsPU0uj_EK2UT47W4,1495 +cryptography/hazmat/primitives/_serialization.py,sha256=qrozc8fw2WZSbjk3DAlSl3ResxpauwJ74ZgGoUL-mj0,5142 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=OOCjMClH1Bf14Sy7jAdwzEeCxFPb8XUe2qePbExvXwc,3420 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=xBwdf0pZOgvqjUKcO7Q0L3NxwalYj0SJDUqThemhSmI,3945 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=lwZmtAwi3PM8lsY1MsNaby_bVi--49OCxwE_1yqKC-A,10428 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=kl63fg7myuMjNTmMoVFeH6iVr0x5FkjNmggxIRTloJk,3423 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2UzEDzzfkPn83UFVFlMZfIMbAixxY09WmQyrwinWTn8,3456 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=eZcvUqVLbe3u48SunLdeniaPlV4-k6pwBl67OW4jSy8,2885 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=nW_Ko7PID9UBJF10GVJOc_1L00ymFsfZDUJYtM5kfGQ,7637 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=VGYuRdIYuVBtizpFdNWd2bTrT10JRa1admQdBr08xz8,3341 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=GKKJBqYLr03VewMF18bXIM941aaWcZIQ4rC02GLLEmw,3374 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,, 
+cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=QvBMDmphRZfNmykij58L5eDkd_2NnCzIpJpyX2QwMxc,4223 +cryptography/hazmat/primitives/ciphers/base.py,sha256=tg-XNaKUyETBi7ounGDEL1_ICn-s4FF9LR7moV58blI,4211 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=BFpxEGSaxoeZjrQ4sqpyPDvKClrqfDKIBv7kYtFURhE,8192 +cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 +cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 +cryptography/hazmat/primitives/hashes.py,sha256=EvDIJBhj83Z7f-oHbsA0TzZLFSDV_Yv8hQRdM4o8FD0,5091 +cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=bcn4NGXse-EsFl7nlU83e5ilop7TSHcX-CJJS107W80,3686 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=uhN5L87w4JvtAqQcPh_Ji2TPSc18IDThpaYJiHOWy3A,3015 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=eSuLK1sATkamgCAit794jLr7sDNlu5X0USdcWhwJdmk,9146 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=Xj3YIeX30h2BUaoJAtOo1RMXV_em0-eCG0PU_0FHJzM,1950 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=wCpWmwQjZ2vAu2rlk3R_PX0nINl8WGXYBmlyMOC5iPw,1992 +cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 +cryptography/hazmat/primitives/padding.py,sha256=QUq0n-EAgEan9aQzuTsiJYGKbWiK1nSHkcYjDF1L1ok,5518 +cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=jyNx_7NcOEbVRBY4nP9ks0IVXBafbcYnTK27vafPLW8,1653 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 +cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=7vVXbiP7qhhvKAHJT_M8-LBZdbpOwrpWRHWxNrNqzXE,4492 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=CNzcsuDMyEFMe3EUii4NfJlQzmakB2hLlfRFYObnHRs,11141 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=VKscMrVdYK5B9PQISjjdRMglRvqa_L3sDNm5vdjVHJY,51915 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,, 
+cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=l1YdRMIhfPIuHKkA66keBDHhNbnBAlh6-O44P-OHIK8,2976 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=v0y0xKwtYrP83ypOo5Ofd441RJLOkaFfjmp554jo5F0,1450 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=Rp7ppg4XIBVVzNQ6XngGndwkICJoYp6FoFOOgTWLJ7g,3925 +cryptography/x509/__init__.py,sha256=uGdiViR7KFnWGoJFVUStt-e_ufomWc87RQBGAZ7dT-4,7980 +cryptography/x509/__pycache__/__init__.cpython-312.pyc,, +cryptography/x509/__pycache__/base.cpython-312.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,, +cryptography/x509/__pycache__/extensions.cpython-312.pyc,, +cryptography/x509/__pycache__/general_name.cpython-312.pyc,, +cryptography/x509/__pycache__/name.cpython-312.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-312.pyc,, +cryptography/x509/__pycache__/oid.cpython-312.pyc,, +cryptography/x509/__pycache__/verification.cpython-312.pyc,, +cryptography/x509/base.py,sha256=3NbbUn9wPruhmoPO7Cl3trc3SrqV2OFIBBE0P2l05mg,37081 +cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261 +cryptography/x509/extensions.py,sha256=R70KkJ_c5NQ6Kx7Rho0sGJ0Rh-bOuBHjVOFSQGRAFCs,67370 +cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 +cryptography/x509/name.py,sha256=MYCxCSTQTpzhjxFPZaANqJ9fGrhESH73vPkoay8HSWM,14830 +cryptography/x509/ocsp.py,sha256=P6A02msz5pe-IkUFpvxezHvnEHGvPdXiD3S0wsuf4-I,20003 +cryptography/x509/oid.py,sha256=X8EbhkRTLrGuv9vHZSGqPd9zpvRVsonU_joWAL5LLY8,885 +cryptography/x509/verification.py,sha256=alfx3VaTSb2bMz7_7s788oL90vzgHwBjVINssdz0Gv0,796 diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/WHEEL new file mode 100644 index 0000000..b3e268a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.7.0) +Root-Is-Purelib: false +Tag: cp39-abi3-manylinux_2_28_x86_64 + diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE new file mode 100644 index 0000000..b11f379 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.APACHE b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.APACHE new file mode 100644 index 0000000..62589ed --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.BSD b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.BSD new file mode 100644 index 0000000..ec1a29d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography-43.0.1.dist-info/license_files/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/lib/python3.12/site-packages/cryptography/__about__.py b/myenv/lib/python3.12/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..d224b2f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/__about__.py @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] + +__version__ = "43.0.1" + + +__author__ = "The Python Cryptographic Authority and individual contributors" +__copyright__ = f"Copyright 2013-2024 {__author__}" diff --git a/myenv/lib/python3.12/site-packages/cryptography/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..d374f75 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
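+# The package root intentionally exposes only version/author metadata; the
+# actual functionality lives in submodules such as cryptography.fernet and
+# cryptography.hazmat.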
+ +from __future__ import annotations + +from cryptography.__about__ import __author__, __copyright__, __version__ + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] diff --git a/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc new file mode 100644 index 0000000..bb748fd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..cfee8b9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..d168c9f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc new file mode 100644 index 0000000..1458252 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..9f129be Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/exceptions.py b/myenv/lib/python3.12/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..fe125ea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/exceptions.py @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
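+# Exception types shared across the library. _Reasons, bound from the Rust
+# layer below, records why an UnsupportedAlgorithm error was raised.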
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions + +if typing.TYPE_CHECKING: + from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +_Reasons = rust_exceptions._Reasons + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message: str, reason: _Reasons | None = None) -> None: + super().__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__( + self, msg: str, err_code: list[rust_openssl.OpenSSLError] + ) -> None: + super().__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/myenv/lib/python3.12/site-packages/cryptography/fernet.py b/myenv/lib/python3.12/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..35ce113 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/fernet.py @@ -0,0 +1,215 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import base64 +import binascii +import os +import time +import typing + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet: + def __init__( + self, + key: bytes | str, + backend: typing.Any = None, + ) -> None: + try: + key = base64.urlsafe_b64decode(key) + except binascii.Error as exc: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) from exc + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + + @classmethod + def generate_key(cls) -> bytes: + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data: bytes) -> bytes: + return self.encrypt_at_time(data, int(time.time())) + + def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts( + self, data: bytes, current_time: int, iv: bytes + ) -> bytes: + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), + modes.CBC(iv), + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + + current_time.to_bytes(length=8, byteorder="big") + + iv + + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(token) + if ttl is None: + time_info = None + else: + time_info = (ttl, int(time.time())) + return self._decrypt_data(data, timestamp, time_info) + + def decrypt_at_time( + self, token: bytes | str, ttl: int, current_time: int + ) -> bytes: + if ttl is None: + raise ValueError( + "decrypt_at_time() can only be used with a non-None ttl" + ) + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, (ttl, current_time)) + + def extract_timestamp(self, token: bytes | str) -> int: + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. 
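+ # Token layout after base64 decoding: 0x80 (version, 1 byte) ||
+ # timestamp (8 bytes, big-endian) || IV (16 bytes) || ciphertext ||
+ # HMAC-SHA256 of all preceding bytes (32 bytes).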
+ self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: + if not isinstance(token, (str, bytes)): + raise TypeError("token must be bytes or str") + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or data[0] != 0x80: + raise InvalidToken + + if len(data) < 9: + raise InvalidToken + + timestamp = int.from_bytes(data[1:9], byteorder="big") + return timestamp, data + + def _verify_signature(self, data: bytes) -> None: + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data( + self, + data: bytes, + timestamp: int, + time_info: tuple[int, int] | None, + ) -> bytes: + if time_info is not None: + ttl, current_time = time_info + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv) + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet: + def __init__(self, fernets: typing.Iterable[Fernet]): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg: bytes) -> bytes: + return self.encrypt_at_time(msg, int(time.time())) + + def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: + return self._fernets[0].encrypt_at_time(msg, current_time) + + def rotate(self, msg: bytes | str) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken + + def decrypt_at_time( + self, msg: bytes | str, ttl: int, current_time: int + ) -> bytes: + for f in self._fernets: + try: + return f.decrypt_at_time(msg, ttl, current_time) + except InvalidToken: + pass + raise InvalidToken diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..b9f1187 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +""" +Hazardous Materials + +This is a "Hazardous Materials" module. You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. 
+""" diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2261407 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc new file mode 100644 index 0000000..54e871f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py new file mode 100644 index 0000000..fd5e37d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1,313 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import ( + ObjectIdentifier as ObjectIdentifier, +) +from cryptography.hazmat.primitives import hashes + + +class ExtensionOID: + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( + "1.3.6.1.4.1.11129.2.4.2" + ) + PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") + MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") + + +class OCSPExtensionOID: + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") + + +class CRLEntryExtensionOID: + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID: + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_IDENTIFIER = 
ObjectIdentifier("2.5.4.97") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + INITIALS = ObjectIdentifier("2.5.4.43") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + INN = ObjectIdentifier("1.2.643.3.131.1.1") + OGRN = ObjectIdentifier("1.2.643.100.1") + SNILS = ObjectIdentifier("1.2.643.100.3") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +class SignatureAlgorithmOID: + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") + RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") + RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") + RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") + ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") + ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") + ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") + DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") + GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") + GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") + + +_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + 
SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, +} + + +class PublicKeyAlgorithmOID: + DSA = ObjectIdentifier("1.2.840.10040.4.1") + EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") + RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + X25519 = ObjectIdentifier("1.3.101.110") + X448 = ObjectIdentifier("1.3.101.111") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + + +class ExtendedKeyUsageOID: + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") + KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") + IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") + CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") + + +class AuthorityInformationAccessOID: + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class SubjectInformationAccessOID: + CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") + + +class CertificatePoliciesOID: + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +class AttributeOID: + CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: 
"organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + NameOID.INN: "INN", + NameOID.OGRN: "OGRN", + NameOID.SNILS: "SNILS", + NameOID.UNSTRUCTURED_NAME: "unstructuredName", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( + "GOST R 34.11-94 with GOST R 34.10-2001" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" + ), + PublicKeyAlgorithmOID.DSA: "dsaEncryption", + PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", + PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", + PublicKeyAlgorithmOID.RSASSA_PSS: "rsassaPss", + PublicKeyAlgorithmOID.X25519: "X25519", + PublicKeyAlgorithmOID.X448: "X448", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", + ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + 
ExtensionOID.PRECERT_POISON: "ctPoison", + ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + OCSPExtensionOID.NONCE: "OCSPNonce", + AttributeOID.CHALLENGE_PASSWORD: "challengePassword", +} diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..b4400aa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from typing import Any + + +def default_backend() -> Any: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..7a4c3ea Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..51b0447 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
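+# Re-exports the module-level singleton instantiated in backend.py so that
+# callers can write `from cryptography.hazmat.backends.openssl import backend`.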
+ +from __future__ import annotations + +from cryptography.hazmat.backends.openssl.backend import backend + +__all__ = ["backend"] diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9fd1ac5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc new file mode 100644 index 0000000..4131b6e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..c87d3e8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,291 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, + OAEP, + PSS, + PKCS1v15, +) +from cryptography.hazmat.primitives.ciphers import ( + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, + Mode, +) + + +class Backend: + """ + OpenSSL API binding interfaces. + """ + + name = "openssl" + + # TripleDES encryption is disallowed/deprecated throughout 2023 in + # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). + _fips_ciphers = (AES,) + # Sometimes SHA1 is still permissible. That logic is contained + # within the various *_supported methods. 
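+ # Hashes permitted under FIPS; MD5 and SHA1 are deliberately absent, so
+ # hash_supported() rejects them whenever self._fips_enabled is True.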
+ _fips_hashes = (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_ecdh_curves = (
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+ ec.SECP521R1,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
+ def __init__(self) -> None:
+ self._binding = binding.Binding()
+ self._ffi = self._binding.ffi
+ self._lib = self._binding.lib
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def __repr__(self) -> str:
+ return (
+ f"<OpenSSLBackend(version: {self.openssl_version_text()}, "
+ f"FIPS: {self._fips_enabled}, "
+ f"Legacy: {rust_openssl._legacy_provider_loaded})>"
+ )
+
+ def openssl_assert(self, ok: bool) -> None:
+ return binding._openssl_assert(ok)
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ rust_openssl.enable_fips(rust_openssl._providers)
+ assert rust_openssl.is_fips_enabled()
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def openssl_version_text(self) -> str:
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 3.2.1 30 Jan 2024
+ """
+ return rust_openssl.openssl_version_text()
+
+ def openssl_version_number(self) -> int:
+ return rust_openssl.openssl_version()
+
+ def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ if algorithm.name in ("blake2b", "blake2s"):
+ alg = f"{algorithm.name}{algorithm.digest_size * 8}".encode(
+ "ascii"
+ )
+ else:
+ alg = algorithm.name.encode("ascii")
+
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
+ def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
+
+ def signature_hash_supported(
+ self, algorithm: hashes.HashAlgorithm
+ ) -> bool:
+ # Dedicated check for hashing algorithm use in message digest for
+ # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+ return self.hash_supported(algorithm)
+
+ def scrypt_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return hasattr(rust_openssl.kdf, "derive_scrypt")
+
+ def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ # FIPS mode still allows SHA1 for HMAC
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return True
+
+ return self.hash_supported(algorithm)
+
+ def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+ if self._fips_enabled:
+ # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+ # FIPS 140-3. 
+ if not isinstance(cipher, self._fips_ciphers): + return False + + return rust_openssl.ciphers.cipher_supported(cipher, mode) + + def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + return self.hmac_supported(algorithm) + + def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: + return rust_openssl.capture_error_stack() + + def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + + return isinstance( + algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ) + + def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked + # as signature algorithm. + if self._fips_enabled and isinstance( + padding._mgf._algorithm, hashes.SHA1 + ): + return True + else: + return self.hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return self._oaep_hash_supported( + padding._mgf._algorithm + ) and self._oaep_hash_supported(padding._algorithm) + else: + return False + + def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: + if self._fips_enabled and isinstance(padding, PKCS1v15): + return False + else: + return self.rsa_padding_supported(padding) + + def dsa_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not self._fips_enabled + ) + + def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if not self.dsa_supported(): + return False + return self.signature_hash_supported(algorithm) + + def cmac_algorithm_supported(self, algorithm) -> bool: + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: + if self._fips_enabled and not isinstance( + curve, self._fips_ecdh_curves + ): + return False + + return rust_openssl.ec.curve_supported(curve) + + def elliptic_curve_signature_algorithm_supported( + self, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + curve: ec.EllipticCurve, + ) -> bool: + # We only support ECDSA right now. 
+ if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) and ( + isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) + or self.hash_supported(signature_algorithm.algorithm) + ) + + def elliptic_curve_exchange_algorithm_supported( + self, algorithm: ec.ECDH, curve: ec.EllipticCurve + ) -> bool: + return self.elliptic_curve_supported(curve) and isinstance( + algorithm, ec.ECDH + ) + + def dh_supported(self) -> bool: + return not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + + def dh_x942_serialization_supported(self) -> bool: + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x25519_supported(self) -> bool: + if self._fips_enabled: + return False + return True + + def x448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + ) + + def ed25519_supported(self) -> bool: + if self._fips_enabled: + return False + return True + + def ed448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + ) + + def ecdsa_deterministic_supported(self) -> bool: + return ( + rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER + and not self._fips_enabled + ) + + def poly1305_supported(self) -> bool: + if self._fips_enabled: + return False + return True + + def pkcs7_supported(self) -> bool: + return not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + + +backend = Backend() diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..06925d0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100755 index 0000000..f056bb8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 100644 index 0000000..c0ea0a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import padding + +def check_pkcs7_padding(data: bytes) -> bool: ... +def check_ansix923_padding(data: bytes) -> bool: ... 
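Editorial note: the Backend singleton instantiated above (backend = Backend()) centralizes the *_supported capability probes. A short sketch of querying it; results vary with the linked OpenSSL build and FIPS state, so treat the outputs as illustrative:

    # Querying the backend's capability probes defined in backend.py above.
    from cryptography.hazmat.backends.openssl.backend import backend
    from cryptography.hazmat.primitives import hashes

    print(backend.openssl_version_text())
    print(backend.hash_supported(hashes.SHA256()))
    print(backend.ed25519_supported())
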
+ +class PKCS7PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: bytes) -> bytes: ... + def finalize(self) -> bytes: ... + +class ObjectIdentifier: + def __init__(self, val: str) -> None: ... + @property + def dotted_string(self) -> str: ... + @property + def _name(self) -> str: ... + +T = typing.TypeVar("T") diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi new file mode 100644 index 0000000..8010008 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi @@ -0,0 +1,8 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +lib = typing.Any +ffi = typing.Any diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 100644 index 0000000..3b5f208 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1,7 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... +def encode_dss_signature(r: int, s: int) -> bytes: ... +def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi new file mode 100644 index 0000000..09f46b1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class _Reasons: + BACKEND_MISSING_INTERFACE: _Reasons + UNSUPPORTED_HASH: _Reasons + UNSUPPORTED_CIPHER: _Reasons + UNSUPPORTED_PADDING: _Reasons + UNSUPPORTED_MGF: _Reasons + UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons + UNSUPPORTED_ELLIPTIC_CURVE: _Reasons + UNSUPPORTED_SERIALIZATION: _Reasons + UNSUPPORTED_X509: _Reasons + UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons + UNSUPPORTED_DIFFIE_HELLMAN: _Reasons + UNSUPPORTED_MAC: _Reasons diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 100644 index 0000000..5e02145 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.x509 import ocsp + +class OCSPRequest: ... +class OCSPResponse: ... +class OCSPSingleResponse: ... + +def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... +def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... 
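Editorial note: the asn1.pyi stubs above back the DER (r, s) signature helpers. A minimal round-trip sketch via their public re-exports in cryptography.hazmat.primitives.asymmetric.utils (the r and s values are arbitrary placeholders):

    # Round-trip an ECDSA/DSA signature through the DER (r, s) helpers
    # whose Rust bindings are stubbed in asn1.pyi above.
    from cryptography.hazmat.primitives.asymmetric.utils import (
        decode_dss_signature,
        encode_dss_signature,
    )

    sig = encode_dss_signature(12345, 67890)
    assert decode_dss_signature(sig) == (12345, 67890)
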
+def create_ocsp_request( + builder: ocsp.OCSPRequestBuilder, +) -> ocsp.OCSPRequest: ... +def create_ocsp_response( + status: ocsp.OCSPResponseStatus, + builder: ocsp.OCSPResponseBuilder | None, + private_key: PrivateKeyTypes | None, + hash_algorithm: hashes.HashAlgorithm | None, +) -> ocsp.OCSPResponse: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi new file mode 100644 index 0000000..1e66d33 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi @@ -0,0 +1,71 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.bindings._rust.openssl import ( + aead, + ciphers, + cmac, + dh, + dsa, + ec, + ed448, + ed25519, + hashes, + hmac, + kdf, + keys, + poly1305, + rsa, + x448, + x25519, +) + +__all__ = [ + "aead", + "ciphers", + "cmac", + "dh", + "dsa", + "ec", + "ed448", + "ed25519", + "hashes", + "hmac", + "kdf", + "keys", + "openssl_version", + "openssl_version_text", + "poly1305", + "raise_openssl_error", + "rsa", + "x448", + "x25519", +] + +CRYPTOGRAPHY_IS_LIBRESSL: bool +CRYPTOGRAPHY_IS_BORINGSSL: bool +CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool + +class Providers: ... + +_legacy_provider_loaded: bool +_providers: Providers + +def openssl_version() -> int: ... +def openssl_version_text() -> str: ... +def raise_openssl_error() -> typing.NoReturn: ... +def capture_error_stack() -> list[OpenSSLError]: ... +def is_fips_enabled() -> bool: ... +def enable_fips(providers: Providers) -> None: ... + +class OpenSSLError: + @property + def lib(self) -> int: ... + @property + def reason(self) -> int: ... + @property + def reason_text(self) -> bytes: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi new file mode 100644 index 0000000..047f49d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi @@ -0,0 +1,103 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class AESGCM: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_key(key_size: int) -> bytes: ... + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + +class ChaCha20Poly1305: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_key() -> bytes: ... + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + +class AESCCM: + def __init__(self, key: bytes, tag_length: int = 16) -> None: ... + @staticmethod + def generate_key(key_size: int) -> bytes: ... + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... 
+ def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + +class AESSIV: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_key(key_size: int) -> bytes: ... + def encrypt( + self, + data: bytes, + associated_data: list[bytes] | None, + ) -> bytes: ... + def decrypt( + self, + data: bytes, + associated_data: list[bytes] | None, + ) -> bytes: ... + +class AESOCB3: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_key(key_size: int) -> bytes: ... + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + +class AESGCMSIV: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_key(key_size: int) -> bytes: ... + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: bytes | None, + ) -> bytes: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi new file mode 100644 index 0000000..759f3b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADEncryptionContext: ... +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> ciphers.CipherContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADDecryptionContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> ciphers.CipherContext: ... +def cipher_supported( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> bool: ... +def _advance( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... +def _advance_aad( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... + +class CipherContext: ... +class AEADEncryptionContext: ... +class AEADDecryptionContext: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi new file mode 100644 index 0000000..9c03508 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi @@ -0,0 +1,18 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import ciphers + +class CMAC: + def __init__( + self, + algorithm: ciphers.BlockCipherAlgorithm, + backend: typing.Any = None, + ) -> None: ... 
+ def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> CMAC: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi new file mode 100644 index 0000000..08733d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi @@ -0,0 +1,51 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dh + +MIN_MODULUS_SIZE: int + +class DHPrivateKey: ... +class DHPublicKey: ... +class DHParameters: ... + +class DHPrivateNumbers: + def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... + def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DHPublicNumbers: ... + +class DHPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DHParameterNumbers + ) -> None: ... + def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DHParameterNumbers: ... + +class DHParameterNumbers: + def __init__(self, p: int, g: int, q: int | None = None) -> None: ... + def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... + @property + def p(self) -> int: ... + @property + def g(self) -> int: ... + @property + def q(self) -> int | None: ... + +def generate_parameters( + generator: int, key_size: int, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_pem_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_der_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi new file mode 100644 index 0000000..0922a4c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi @@ -0,0 +1,41 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dsa + +class DSAPrivateKey: ... +class DSAPublicKey: ... +class DSAParameters: ... + +class DSAPrivateNumbers: + def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DSAPublicNumbers: ... + def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... + +class DSAPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DSAParameterNumbers + ) -> None: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DSAParameterNumbers: ... + def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... + +class DSAParameterNumbers: + def __init__(self, p: int, q: int, g: int) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def g(self) -> int: ... 
+ def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... + +def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi new file mode 100644 index 0000000..5c3b7bf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import ec + +class ECPrivateKey: ... +class ECPublicKey: ... + +class EllipticCurvePrivateNumbers: + def __init__( + self, private_value: int, public_numbers: EllipticCurvePublicNumbers + ) -> None: ... + def private_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePrivateKey: ... + @property + def private_value(self) -> int: ... + @property + def public_numbers(self) -> EllipticCurvePublicNumbers: ... + +class EllipticCurvePublicNumbers: + def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... + def public_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePublicKey: ... + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @property + def curve(self) -> ec.EllipticCurve: ... + def __eq__(self, other: object) -> bool: ... + +def curve_supported(curve: ec.EllipticCurve) -> bool: ... +def generate_private_key( + curve: ec.EllipticCurve, backend: typing.Any = None +) -> ec.EllipticCurvePrivateKey: ... +def from_private_numbers( + numbers: ec.EllipticCurvePrivateNumbers, +) -> ec.EllipticCurvePrivateKey: ... +def from_public_numbers( + numbers: ec.EllipticCurvePublicNumbers, +) -> ec.EllipticCurvePublicKey: ... +def from_public_bytes( + curve: ec.EllipticCurve, data: bytes +) -> ec.EllipticCurvePublicKey: ... +def derive_private_key( + private_value: int, curve: ec.EllipticCurve +) -> ec.EllipticCurvePrivateKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi new file mode 100644 index 0000000..5233f9a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi @@ -0,0 +1,12 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed25519 + +class Ed25519PrivateKey: ... +class Ed25519PublicKey: ... + +def generate_key() -> ed25519.Ed25519PrivateKey: ... +def from_private_bytes(data: bytes) -> ed25519.Ed25519PrivateKey: ... +def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi new file mode 100644 index 0000000..7a06520 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi @@ -0,0 +1,12 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed448 + +class Ed448PrivateKey: ... +class Ed448PublicKey: ... + +def generate_key() -> ed448.Ed448PrivateKey: ... +def from_private_bytes(data: bytes) -> ed448.Ed448PrivateKey: ... +def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi new file mode 100644 index 0000000..ca5f42a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes + +class Hash(hashes.HashContext): + def __init__( + self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def copy(self) -> Hash: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi new file mode 100644 index 0000000..e38d9b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi @@ -0,0 +1,21 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes + +class HMAC(hashes.HashContext): + def __init__( + self, + key: bytes, + algorithm: hashes.HashAlgorithm, + backend: typing.Any = None, + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> HMAC: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi new file mode 100644 index 0000000..034a8fe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi @@ -0,0 +1,22 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +def derive_pbkdf2_hmac( + key_material: bytes, + algorithm: HashAlgorithm, + salt: bytes, + iterations: int, + length: int, +) -> bytes: ... +def derive_scrypt( + key_material: bytes, + salt: bytes, + n: int, + r: int, + p: int, + max_mem: int, + length: int, +) -> bytes: ... 
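Editorial note: derive_pbkdf2_hmac above is the Rust entry point behind the public PBKDF2HMAC class. A self-contained sketch of that public path; the iteration count is an illustrative choice, not a recommendation:

    # Public KDF path serviced by the derive_pbkdf2_hmac binding above.
    import os

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=os.urandom(16),
        iterations=600_000,  # illustrative work factor
    )
    key = kdf.derive(b"correct horse battery staple")
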
diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi new file mode 100644 index 0000000..6815b7d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi @@ -0,0 +1,33 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric.types import ( + PrivateKeyTypes, + PublicKeyTypes, +) + +def load_der_private_key( + data: bytes, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_pem_private_key( + data: bytes, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_der_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... +def load_pem_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi new file mode 100644 index 0000000..2e9b0a9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class Poly1305: + def __init__(self, key: bytes) -> None: ... + @staticmethod + def generate_tag(key: bytes, data: bytes) -> bytes: ... + @staticmethod + def verify_tag(key: bytes, data: bytes, tag: bytes) -> None: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, tag: bytes) -> None: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi new file mode 100644 index 0000000..ef7752d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import rsa + +class RSAPrivateKey: ... +class RSAPublicKey: ... + +class RSAPrivateNumbers: + def __init__( + self, + p: int, + q: int, + d: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_numbers: RSAPublicNumbers, + ) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def d(self) -> int: ... + @property + def dmp1(self) -> int: ... + @property + def dmq1(self) -> int: ... + @property + def iqmp(self) -> int: ... + @property + def public_numbers(self) -> RSAPublicNumbers: ... + def private_key( + self, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, + ) -> rsa.RSAPrivateKey: ... + +class RSAPublicNumbers: + def __init__(self, e: int, n: int) -> None: ... + @property + def n(self) -> int: ... + @property + def e(self) -> int: ... 
+ def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... + +def generate_private_key( + public_exponent: int, + key_size: int, +) -> rsa.RSAPrivateKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi new file mode 100644 index 0000000..da0f3ec --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi @@ -0,0 +1,12 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x25519 + +class X25519PrivateKey: ... +class X25519PublicKey: ... + +def generate_key() -> x25519.X25519PrivateKey: ... +def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ... +def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi new file mode 100644 index 0000000..e51cfeb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi @@ -0,0 +1,12 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x448 + +class X448PrivateKey: ... +class X448PublicKey: ... + +def generate_key() -> x448.X448PrivateKey: ... +def from_private_bytes(data: bytes) -> x448.X448PrivateKey: ... +def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi new file mode 100644 index 0000000..40514c4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi @@ -0,0 +1,46 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography import x509 +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.hazmat.primitives.serialization import ( + KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.pkcs12 import ( + PKCS12KeyAndCertificates, + PKCS12PrivateKeyTypes, +) + +class PKCS12Certificate: + def __init__( + self, cert: x509.Certificate, friendly_name: bytes | None + ) -> None: ... + @property + def friendly_name(self) -> bytes | None: ... + @property + def certificate(self) -> x509.Certificate: ... + +def load_key_and_certificates( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> tuple[ + PrivateKeyTypes | None, + x509.Certificate | None, + list[x509.Certificate], +]: ... +def load_pkcs12( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> PKCS12KeyAndCertificates: ... 
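Editorial note: the x25519.pyi stubs above expose key generation and raw-bytes loading. A short key-agreement sketch through the public API, showing both sides deriving the same shared secret:

    # Two ephemeral X25519 keys agree on a shared secret; this exercises
    # the generate_key binding stubbed above via the public API.
    from cryptography.hazmat.primitives.asymmetric.x25519 import (
        X25519PrivateKey,
    )

    a = X25519PrivateKey.generate()
    b = X25519PrivateKey.generate()
    assert a.exchange(b.public_key()) == b.exchange(a.public_key())
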
+def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: typing.Iterable[x509.Certificate | PKCS12Certificate] | None, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi new file mode 100644 index 0000000..a72120a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import pkcs7 + +def serialize_certificates( + certs: list[x509.Certificate], + encoding: serialization.Encoding, +) -> bytes: ... +def encrypt_and_serialize( + builder: pkcs7.PKCS7EnvelopeBuilder, + encoding: serialization.Encoding, + options: typing.Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def sign_and_serialize( + builder: pkcs7.PKCS7SignatureBuilder, + encoding: serialization.Encoding, + options: typing.Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def load_pem_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_der_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi new file mode 100644 index 0000000..a53ee25 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi @@ -0,0 +1,29 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import pkcs7 + +class TestCertificate: + not_after_tag: int + not_before_tag: int + issuer_value_tags: list[int] + subject_value_tags: list[int] + +def test_parse_certificate(data: bytes) -> TestCertificate: ... +def pkcs7_decrypt( + encoding: serialization.Encoding, + msg: bytes, + pkey: serialization.pkcs7.PKCS7PrivateKeyTypes, + cert_recipient: x509.Certificate, + options: list[pkcs7.PKCS7Options], +) -> bytes: ... +def pkcs7_verify( + encoding: serialization.Encoding, + sig: bytes, + msg: bytes | None, + certs: list[x509.Certificate], + options: list[pkcs7.PKCS7Options], +) -> None: ... diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 100644 index 0000000..aa85657 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1,108 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
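Editorial note: sign_and_serialize in pkcs7.pyi above is the Rust entry point for the public PKCS7SignatureBuilder. A self-contained sketch that signs a message with a throwaway self-signed certificate; the subject name and one-day validity are illustrative assumptions:

    # PKCS#7/CMS signing via the builder whose Rust entry point is stubbed
    # in pkcs7.pyi above. The certificate is generated inline purely so
    # the sketch runs standalone.
    import datetime

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.hazmat.primitives.serialization import pkcs7
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "pkcs7 demo")])
    now = datetime.datetime.now(datetime.timezone.utc)
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=1))
        .sign(key, hashes.SHA256())
    )
    der = (
        pkcs7.PKCS7SignatureBuilder()
        .set_data(b"hello world")
        .add_signer(cert, key, hashes.SHA256())
        .sign(serialization.Encoding.DER, [])
    )
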
+ +import datetime +import typing + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes + +def load_pem_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_der_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_pem_x509_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_pem_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_der_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_pem_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def load_der_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def encode_name_bytes(name: x509.Name) -> bytes: ... +def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... +def create_x509_certificate( + builder: x509.CertificateBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, +) -> x509.Certificate: ... +def create_x509_csr( + builder: x509.CertificateSigningRequestBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, +) -> x509.CertificateSigningRequest: ... +def create_x509_crl( + builder: x509.CertificateRevocationListBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, +) -> x509.CertificateRevocationList: ... + +class Sct: ... +class Certificate: ... +class RevokedCertificate: ... +class CertificateRevocationList: ... +class CertificateSigningRequest: ... + +class PolicyBuilder: + def time(self, new_time: datetime.datetime) -> PolicyBuilder: ... + def store(self, new_store: Store) -> PolicyBuilder: ... + def max_chain_depth(self, new_max_chain_depth: int) -> PolicyBuilder: ... + def build_client_verifier(self) -> ClientVerifier: ... + def build_server_verifier( + self, subject: x509.verification.Subject + ) -> ServerVerifier: ... + +class VerifiedClient: + @property + def subjects(self) -> list[x509.GeneralName]: ... + @property + def chain(self) -> list[x509.Certificate]: ... + +class ClientVerifier: + @property + def validation_time(self) -> datetime.datetime: ... + @property + def store(self) -> Store: ... + @property + def max_chain_depth(self) -> int: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> VerifiedClient: ... + +class ServerVerifier: + @property + def subject(self) -> x509.verification.Subject: ... + @property + def validation_time(self) -> datetime.datetime: ... + @property + def store(self) -> Store: ... + @property + def max_chain_depth(self) -> int: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> list[x509.Certificate]: ... + +class Store: + def __init__(self, certs: list[x509.Certificate]) -> None: ... 
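Editorial note: among the x509.pyi declarations above, create_x509_csr backs the public CertificateSigningRequestBuilder. A compact, runnable sketch; the common name is a placeholder:

    # CSR generation exercising the create_x509_csr path declared above.
    from cryptography import x509
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec
    from cryptography.x509.oid import NameOID

    key = ec.generate_private_key(ec.SECP256R1())
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(
            x509.Name(
                [x509.NameAttribute(NameOID.COMMON_NAME, "example.com")]
            )
        )
        .sign(key, hashes.SHA256())
    )
    assert csr.is_signature_valid
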
+ +class VerificationError(Exception): + pass diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a545c91 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc new file mode 100644 index 0000000..433e1e4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc new file mode 100644 index 0000000..1ea942d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..73c06f7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,183 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + + +def cryptography_has_set_cert_cb() -> list[str]: + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st() -> list[str]: + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st() -> list[str]: + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_ssl_sigalgs() -> list[str]: + return [ + "SSL_CTX_set1_sigalgs_list", + ] + + +def cryptography_has_psk() -> list[str]: + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_psk_tlsv13() -> list[str]: + return [ + "SSL_CTX_set_psk_find_session_callback", + "SSL_CTX_set_psk_use_session_callback", + "Cryptography_SSL_SESSION_new", + "SSL_CIPHER_find", + "SSL_SESSION_set1_master_key", + "SSL_SESSION_set_cipher", + "SSL_SESSION_set_protocol_version", + ] + + +def cryptography_has_custom_ext() -> list[str]: + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_tlsv13_functions() -> list[str]: + return [ + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_CTX_set_ciphersuites", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_engine() -> list[str]: + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "ENGINE_ctrl_cmd_string", + "ENGINE_load_builtin_engines", + "ENGINE_load_private_key", + "ENGINE_load_public_key", + "SSL_CTX_set_client_cert_engine", + ] + + +def cryptography_has_verified_chain() -> list[str]: + return [ + "SSL_get0_verified_chain", + ] + + +def cryptography_has_srtp() -> list[str]: + return [ + "SSL_CTX_set_tlsext_use_srtp", + "SSL_set_tlsext_use_srtp", + "SSL_get_selected_srtp_profile", + ] + + +def cryptography_has_op_no_renegotiation() -> list[str]: + return [ + "SSL_OP_NO_RENEGOTIATION", + ] + + +def cryptography_has_dtls_get_data_mtu() -> list[str]: + return [ + "DTLS_get_data_mtu", + ] + + +def cryptography_has_ssl_cookie() -> list[str]: + return [ + "SSL_OP_COOKIE_EXCHANGE", + "DTLSv1_listen", + "SSL_CTX_set_cookie_generate_cb", + "SSL_CTX_set_cookie_verify_cb", + ] + + +def cryptography_has_prime_checks() -> list[str]: + return [ + "BN_prime_checks_for_size", + ] + + +def cryptography_has_unexpected_eof_while_reading() -> list[str]: + return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] + + +def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: + return [ + "SSL_OP_IGNORE_UNEXPECTED_EOF", + ] + + +def cryptography_has_get_extms_support() -> list[str]: + return ["SSL_get_extms_support"] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. 
+CONDITIONAL_NAMES = { + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, + "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, + "Cryptography_HAS_SRTP": cryptography_has_srtp, + "Cryptography_HAS_OP_NO_RENEGOTIATION": ( + cryptography_has_op_no_renegotiation + ), + "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, + "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, + "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, + "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( + cryptography_has_unexpected_eof_while_reading + ), + "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( + cryptography_has_ssl_op_ignore_unexpected_eof + ), + "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, +} diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..d4dfeef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,121 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import os +import sys +import threading +import types +import typing +import warnings + +import cryptography +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._rust import _openssl, openssl +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + + +def _openssl_assert(ok: bool) -> None: + if not ok: + errors = openssl.capture_error_stack() + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + f"this. ({errors!r})", + errors, + ) + + +def build_conditional_library( + lib: typing.Any, + conditional_names: dict[str, typing.Callable[[], list[str]]], +) -> typing.Any: + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib # type: ignore[attr-defined] + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding: + """ + OpenSSL API wrapper. 
+ """ + + lib: typing.ClassVar = None + ffi = _openssl.ffi + _lib_loaded = False + _init_lock = threading.Lock() + + def __init__(self) -> None: + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls) -> None: + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library( + _openssl.lib, CONDITIONAL_NAMES + ) + cls._lib_loaded = True + + @classmethod + def init_static_locks(cls) -> None: + cls._ensure_ffi_initialized() + + +def _verify_package_version(version: str) -> None: + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = _openssl.ffi.string( + _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION + ) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. " + f"Loaded python version: {version}, " + f"shared object version: {so_package_version}" + ) + + _openssl_assert( + _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), + ) + + +_verify_package_version(cryptography.__version__) + +Binding.init_static_locks() + +if ( + sys.platform == "win32" + and os.environ.get("PROCESSOR_ARCHITEW6432") is not None +): + warnings.warn( + "You are using cryptography on a 32-bit Python on a 64-bit Windows " + "Operating System. Cryptography will be significantly faster if you " + "switch to using a 64-bit Python.", + UserWarning, + stacklevel=2, + ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a733949 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
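Editorial note: build_conditional_library in binding.py above strips any name whose feature-flag condition is false on the loaded lib. A toy demonstration; FakeLib and the single-entry mapping are fabrications for illustration only (the real call passes the cffi lib and the full CONDITIONAL_NAMES dict):

    # Demonstrates the conditional-name filtering implemented by
    # build_conditional_library above. FakeLib stands in for the cffi lib;
    # its __dir__ mimics cffi's symbol listing.
    from cryptography.hazmat.bindings.openssl.binding import (
        build_conditional_library,
    )

    class FakeLib:
        Cryptography_HAS_ENGINE = 0  # feature flag reported as absent
        ENGINE_by_id = "engine symbol"
        SSL_read = "unconditional symbol"

        def __dir__(self):
            return ["Cryptography_HAS_ENGINE", "ENGINE_by_id", "SSL_read"]

    lib = build_conditional_library(
        FakeLib(), {"Cryptography_HAS_ENGINE": lambda: ["ENGINE_by_id"]}
    )
    assert hasattr(lib, "SSL_read") and not hasattr(lib, "ENGINE_by_id")
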
+ +from __future__ import annotations diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..04b9a00 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 0000000..e68bde6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py new file mode 100644 index 0000000..a7d4aa3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py @@ -0,0 +1,107 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, + _verify_key_size, +) + + +class ARC4(CipherAlgorithm): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class TripleDES(BlockCipherAlgorithm): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key: bytes): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class Blowfish(BlockCipherAlgorithm): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class CAST5(BlockCipherAlgorithm): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SEED(BlockCipherAlgorithm): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class IDEA(BlockCipherAlgorithm): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# This class only allows RC2 with a 128-bit key. No support for +# effective key bits or other key sizes is provided. 
+class RC2(BlockCipherAlgorithm): + name = "RC2" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ea0bc28 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc new file mode 100644 index 0000000..361f09d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc new file mode 100644 index 0000000..320b91c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc new file mode 100644 index 0000000..13d20b1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc new file mode 100644 index 0000000..4f82e70 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc new file mode 100644 index 0000000..765e906 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc new file mode 100644 index 0000000..0448fba Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc new file mode 100644 index 0000000..e9c0b28 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc new file mode 100644 index 0000000..b908c7b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc new file mode 100644 index 0000000..62d6ba7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc new file mode 100644 index 0000000..380e453 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 100644 index 0000000..ea55ffd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +# This exists to break an import cycle. It is normally accessible from the +# asymmetric padding module. + + +class AsymmetricPadding(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this padding (e.g. "PSS", "PKCS1"). + """ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 100644 index 0000000..588a616 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1,58 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils + +# This exists to break an import cycle. It is normally accessible from the +# ciphers module. + + +class CipherAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "AES", "Camellia"). 
+ """ + + @property + @abc.abstractmethod + def key_sizes(self) -> frozenset[int]: + """ + Valid key sizes for this algorithm in bits + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +class BlockCipherAlgorithm(CipherAlgorithm): + key: bytes + + @property + @abc.abstractmethod + def block_size(self) -> int: + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes: + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError( + f"Invalid key size ({len(key) * 8}) for {algorithm.name}." + ) + return key diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 100644 index 0000000..4615772 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1,169 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +# This exists to break an import cycle. These classes are normally accessible +# from the serialization module. + + +class PBES(utils.Enum): + PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" + PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" + + +class Encoding(utils.Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + SMIME = "S/MIME" + + +class PrivateFormat(utils.Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + OpenSSH = "OpenSSH" + PKCS12 = "PKCS12" + + def encryption_builder(self) -> KeySerializationEncryptionBuilder: + if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): + raise ValueError( + "encryption_builder only supported with PrivateFormat.OpenSSH" + " and PrivateFormat.PKCS12" + ) + return KeySerializationEncryptionBuilder(self) + + +class PublicFormat(utils.Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(utils.Enum): + PKCS3 = "PKCS3" + + +class KeySerializationEncryption(metaclass=abc.ABCMeta): + pass + + +class BestAvailableEncryption(KeySerializationEncryption): + def __init__(self, password: bytes): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +class NoEncryption(KeySerializationEncryption): + pass + + +class KeySerializationEncryptionBuilder: + def __init__( + self, + format: PrivateFormat, + *, + _kdf_rounds: int | None = None, + _hmac_hash: HashAlgorithm | None = None, + _key_cert_algorithm: PBES | None = None, + ) -> None: + self._format = format + + self._kdf_rounds = _kdf_rounds + self._hmac_hash = _hmac_hash + self._key_cert_algorithm = _key_cert_algorithm + + def kdf_rounds(self, rounds: int) -> 
KeySerializationEncryptionBuilder: + if self._kdf_rounds is not None: + raise ValueError("kdf_rounds already set") + + if not isinstance(rounds, int): + raise TypeError("kdf_rounds must be an integer") + + if rounds < 1: + raise ValueError("kdf_rounds must be a positive integer") + + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=self._key_cert_algorithm, + ) + + def hmac_hash( + self, algorithm: HashAlgorithm + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "hmac_hash only supported with PrivateFormat.PKCS12" + ) + + if self._hmac_hash is not None: + raise ValueError("hmac_hash already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=algorithm, + _key_cert_algorithm=self._key_cert_algorithm, + ) + + def key_cert_algorithm( + self, algorithm: PBES + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "key_cert_algorithm only supported with " + "PrivateFormat.PKCS12" + ) + if self._key_cert_algorithm is not None: + raise ValueError("key_cert_algorithm already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=algorithm, + ) + + def build(self, password: bytes) -> KeySerializationEncryption: + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + return _KeySerializationEncryption( + self._format, + password, + kdf_rounds=self._kdf_rounds, + hmac_hash=self._hmac_hash, + key_cert_algorithm=self._key_cert_algorithm, + ) + + +class _KeySerializationEncryption(KeySerializationEncryption): + def __init__( + self, + format: PrivateFormat, + password: bytes, + *, + kdf_rounds: int | None, + hmac_hash: HashAlgorithm | None, + key_cert_algorithm: PBES | None, + ): + self._format = format + self.password = password + + self._kdf_rounds = kdf_rounds + self._hmac_hash = hmac_hash + self._key_cert_algorithm = key_cert_algorithm diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
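The KeySerializationEncryptionBuilder above is immutable: each option method returns a fresh builder, and build() yields the opaque KeySerializationEncryption object that the private_bytes()/PKCS#12 serialization APIs consume. A short usage sketch mirroring the library's documented pattern (PKCS12 shown, since it accepts all three options):

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.serialization import PrivateFormat, pkcs12

    # Option methods may be chained in any order; each returns a new builder.
    encryption = (
        PrivateFormat.PKCS12.encryption_builder()
        .kdf_rounds(50000)
        .key_cert_algorithm(pkcs12.PBES.PBESv2SHA256AndAES256CBC)
        .hmac_hash(hashes.SHA256())
        .build(b"my password")
    )

For PrivateFormat.OpenSSH only kdf_rounds() applies; hmac_hash() and key_cert_algorithm() raise TypeError for any format other than PKCS12, as enforced above.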
diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..73c18d1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc new file mode 100644 index 0000000..ce0968c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 0000000..aef251f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc new file mode 100644 index 0000000..4f2186c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc new file mode 100644 index 0000000..905d0fd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc new file mode 100644 index 0000000..e58f5e9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc new file mode 100644 index 0000000..39dac67 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..5b2b46f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..9fe848b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..c8067a7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc new file mode 100644 index 0000000..27dc445 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc new file mode 100644 index 0000000..bec6c78 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..31c9748 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,135 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + +generate_parameters = rust_openssl.dh.generate_parameters + + +DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers +DHPublicNumbers = rust_openssl.dh.DHPublicNumbers +DHParameterNumbers = rust_openssl.dh.DHParameterNumbers + + +class DHParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DHPrivateKey: + """ + Generates and returns a DHPrivateKey. + """ + + @abc.abstractmethod + def parameter_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.ParameterFormat, + ) -> bytes: + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DHParameterNumbers: + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters +DHParameters.register(rust_openssl.dh.DHParameters) + + +class DHPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DHPublicNumbers: + """ + Returns a DHPublicNumbers. 
+ """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +DHPublicKeyWithSerialization = DHPublicKey +DHPublicKey.register(rust_openssl.dh.DHPublicKey) + + +class DHPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DHPublicKey: + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: DHPublicKey) -> bytes: + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + @abc.abstractmethod + def private_numbers(self) -> DHPrivateNumbers: + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +DHPrivateKeyWithSerialization = DHPrivateKey +DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..6dd34c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,154 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class DSAParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DSAPrivateKey: + """ + Generates and returns a DSAPrivateKey. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DSAParameterNumbers: + """ + Returns a DSAParameterNumbers. + """ + + +DSAParametersWithNumbers = DSAParameters +DSAParameters.register(rust_openssl.dsa.DSAParameters) + + +class DSAPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DSAPublicKey: + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> DSAPrivateNumbers: + """ + Returns a DSAPrivateNumbers. 
+ """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +DSAPrivateKeyWithSerialization = DSAPrivateKey +DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) + + +class DSAPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DSAPublicNumbers: + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey +DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) + +DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers +DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers +DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers + + +def generate_parameters( + key_size: int, backend: typing.Any = None +) -> DSAParameters: + if key_size not in (1024, 2048, 3072, 4096): + raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") + + return rust_openssl.dsa.generate_parameters(key_size) + + +def generate_private_key( + key_size: int, backend: typing.Any = None +) -> DSAPrivateKey: + parameters = generate_parameters(key_size) + return parameters.generate_private_key() diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..da1fbea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,403 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class EllipticCurveOID: + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +class EllipticCurve(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + The name of the curve. e.g. secp256r1. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + +class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + """ + The digest algorithm used with this signature. + """ + + +class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def exchange( + self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey + ) -> bytes: + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self) -> EllipticCurvePublicKey: + """ + The EllipticCurvePublicKey for this private key. + """ + + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> EllipticCurvePrivateNumbers: + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey +EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) + + +class EllipticCurvePublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. 
+ """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def public_numbers(self) -> EllipticCurvePublicNumbers: + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> EllipticCurvePublicKey: + utils._check_bytes("data", data) + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if data[0] not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + return rust_openssl.ec.from_public_bytes(curve, data) + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey +EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) + +EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers +EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers + + +class SECT571R1(EllipticCurve): + name = "sect571r1" + key_size = 570 + + +class SECT409R1(EllipticCurve): + name = "sect409r1" + key_size = 409 + + +class SECT283R1(EllipticCurve): + name = "sect283r1" + key_size = 283 + + +class SECT233R1(EllipticCurve): + name = "sect233r1" + key_size = 233 + + +class SECT163R2(EllipticCurve): + name = "sect163r2" + key_size = 163 + + +class SECT571K1(EllipticCurve): + name = "sect571k1" + key_size = 571 + + +class SECT409K1(EllipticCurve): + name = "sect409k1" + key_size = 409 + + +class SECT283K1(EllipticCurve): + name = "sect283k1" + key_size = 283 + + +class SECT233K1(EllipticCurve): + name = "sect233k1" + key_size = 233 + + +class SECT163K1(EllipticCurve): + name = "sect163k1" + key_size = 163 + + +class SECP521R1(EllipticCurve): + name = "secp521r1" + key_size = 521 + + +class SECP384R1(EllipticCurve): + name = "secp384r1" + key_size = 384 + + +class SECP256R1(EllipticCurve): + name = "secp256r1" + key_size = 256 + + +class SECP256K1(EllipticCurve): + name = "secp256k1" + key_size = 256 + + +class SECP224R1(EllipticCurve): + name = "secp224r1" + key_size = 224 + + +class SECP192R1(EllipticCurve): + name = "secp192r1" + key_size = 192 + + +class BrainpoolP256R1(EllipticCurve): + name = "brainpoolP256r1" + key_size = 256 + + +class BrainpoolP384R1(EllipticCurve): + name = "brainpoolP384r1" + key_size = 384 + + +class BrainpoolP512R1(EllipticCurve): + name = "brainpoolP512r1" + key_size = 512 + + +_CURVE_TYPES: dict[str, EllipticCurve] = { + "prime192v1": SECP192R1(), + "prime256v1": SECP256R1(), + "secp192r1": SECP192R1(), + "secp224r1": SECP224R1(), + "secp256r1": SECP256R1(), + "secp384r1": SECP384R1(), + "secp521r1": SECP521R1(), + "secp256k1": SECP256K1(), + "sect163k1": SECT163K1(), + "sect233k1": SECT233K1(), + "sect283k1": SECT283K1(), + "sect409k1": SECT409K1(), + "sect571k1": SECT571K1(), + "sect163r2": SECT163R2(), + "sect233r1": SECT233R1(), + "sect283r1": SECT283R1(), + "sect409r1": SECT409R1(), + "sect571r1": SECT571R1(), + "brainpoolP256r1": BrainpoolP256R1(), + "brainpoolP384r1": BrainpoolP384R1(), + "brainpoolP512r1": 
BrainpoolP512R1(), +} + + +class ECDSA(EllipticCurveSignatureAlgorithm): + def __init__( + self, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + deterministic_signing: bool = False, + ): + from cryptography.hazmat.backends.openssl.backend import backend + + if ( + deterministic_signing + and not backend.ecdsa_deterministic_supported() + ): + raise UnsupportedAlgorithm( + "ECDSA with deterministic signature (RFC 6979) is not " + "supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + self._algorithm = algorithm + self._deterministic_signing = deterministic_signing + + @property + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + return self._algorithm + + @property + def deterministic_signing( + self, + ) -> bool: + return self._deterministic_signing + + +generate_private_key = rust_openssl.ec.generate_private_key + + +def derive_private_key( + private_value: int, + curve: EllipticCurve, + backend: typing.Any = None, +) -> EllipticCurvePrivateKey: + if not isinstance(private_value, int): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + return rust_openssl.ec.derive_private_key(private_value, curve) + + +class ECDH: + pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..3a26185 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,116 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
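The curve classes, OID table, and ECDSA/ECDH definitions above compose as follows; a minimal sketch of signing, verification, key agreement, and OID lookup:

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec

    # ECDSA over NIST P-256 (SECP256R1 above).
    key = ec.generate_private_key(ec.SECP256R1())
    sig = key.sign(b"message", ec.ECDSA(hashes.SHA256()))
    key.public_key().verify(sig, b"message", ec.ECDSA(hashes.SHA256()))

    # ECDH: the empty ECDH class is just a marker object accepted by exchange().
    peer = ec.generate_private_key(ec.SECP256R1())
    shared_secret = key.exchange(ec.ECDH(), peer.public_key())

    # OID -> curve class lookup through the _OID_TO_CURVE table.
    assert ec.get_curve_for_oid(ec.EllipticCurveOID.SECP256R1) is ec.SECP256R1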
+ +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + + +class Ed25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) + + +class Ed25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed25519PublicKey: + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """ + Signs the data. + """ + + +Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..78c82c4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,118 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
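Ed25519 above takes no algorithm parameters at all: keys sign raw messages directly. A short sketch, including the 32-byte raw round trip that public_bytes_raw() promises:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives.asymmetric import ed25519

    key = ed25519.Ed25519PrivateKey.generate()
    public = key.public_key()
    signature = key.sign(b"message")
    public.verify(signature, b"message")  # returns None; raises on failure
    try:
        public.verify(signature, b"tampered")
    except InvalidSignature:
        pass

    # Raw-bytes round trip via the *_raw helpers declared above.
    restored = ed25519.Ed25519PublicKey.from_public_bytes(public.public_bytes_raw())
    restored.verify(signature, b"message")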
+ +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + + +class Ed448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +if hasattr(rust_openssl, "ed448"): + Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) + + +class Ed448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed448PublicKey: + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + +if hasattr(rust_openssl, "ed448"): + Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..b4babf4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,113 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
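Ed448 mirrors the Ed25519 interface; the hasattr() guards above keep the module importable on builds whose rust_openssl binding omits ed448, with the backend.ed448_supported() checks raising UnsupportedAlgorithm at call time instead. A minimal sketch:

    from cryptography.hazmat.primitives.asymmetric import ed448

    key = ed448.Ed448PrivateKey.generate()  # raises UnsupportedAlgorithm if absent
    sig = key.sign(b"message")
    key.public_key().verify(sig, b"message")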
+ +from __future__ import annotations + +import abc + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import ( + AsymmetricPadding as AsymmetricPadding, +) +from cryptography.hazmat.primitives.asymmetric import rsa + + +class PKCS1v15(AsymmetricPadding): + name = "EMSA-PKCS1-v1_5" + + +class _MaxLength: + "Sentinel value for `MAX_LENGTH`." + + +class _Auto: + "Sentinel value for `AUTO`." + + +class _DigestLength: + "Sentinel value for `DIGEST_LENGTH`." + + +class PSS(AsymmetricPadding): + MAX_LENGTH = _MaxLength() + AUTO = _Auto() + DIGEST_LENGTH = _DigestLength() + name = "EMSA-PSS" + _salt_length: int | _MaxLength | _Auto | _DigestLength + + def __init__( + self, + mgf: MGF, + salt_length: int | _MaxLength | _Auto | _DigestLength, + ) -> None: + self._mgf = mgf + + if not isinstance( + salt_length, (int, _MaxLength, _Auto, _DigestLength) + ): + raise TypeError( + "salt_length must be an integer, MAX_LENGTH, " + "DIGEST_LENGTH, or AUTO" + ) + + if isinstance(salt_length, int) and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + @property + def mgf(self) -> MGF: + return self._mgf + + +class OAEP(AsymmetricPadding): + name = "EME-OAEP" + + def __init__( + self, + mgf: MGF, + algorithm: hashes.HashAlgorithm, + label: bytes | None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + @property + def mgf(self) -> MGF: + return self._mgf + + +class MGF(metaclass=abc.ABCMeta): + _algorithm: hashes.HashAlgorithm + + +class MGF1(MGF): + MAX_LENGTH = _MaxLength() + + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length( + key: rsa.RSAPrivateKey | rsa.RSAPublicKey, + hash_algorithm: hashes.HashAlgorithm, +) -> int: + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = (key.key_size + 6) // 8 + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..7a387b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,260 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
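The PSS and OAEP paddings defined above pair with keys from the rsa module whose diff begins just above. A combined sketch using the MAX_LENGTH sentinel and a standard OAEP configuration:

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

    # PSS signature; MAX_LENGTH asks for the largest salt the key permits.
    pss = padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()),
        salt_length=padding.PSS.MAX_LENGTH,
    )
    sig = key.sign(b"message", pss, hashes.SHA256())
    key.public_key().verify(sig, b"message", pss, hashes.SHA256())

    # OAEP encryption to the public key, decryption with the private key.
    oaep = padding.OAEP(
        mgf=padding.MGF1(hashes.SHA256()),
        algorithm=hashes.SHA256(),
        label=None,
    )
    assert key.decrypt(key.public_key().encrypt(b"secret", oaep), oaep) == b"secret"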
+ +from __future__ import annotations + +import abc +import typing +from math import gcd + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class RSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Decrypts the provided ciphertext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self) -> RSAPublicKey: + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_numbers(self) -> RSAPrivateNumbers: + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +RSAPrivateKeyWithSerialization = RSAPrivateKey +RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) + + +class RSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Encrypts the given plaintext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self) -> RSAPublicNumbers: + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm | None, + ) -> bytes: + """ + Recovers the original data from the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey +RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) + +RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers +RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers + + +def generate_private_key( + public_exponent: int, + key_size: int, + backend: typing.Any = None, +) -> RSAPrivateKey: + _verify_rsa_parameters(public_exponent, key_size) + return rust_openssl.rsa.generate_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: + if public_exponent not in (3, 65537): + raise ValueError( + "public_exponent must be either 3 (for legacy compatibility) or " + "65537. Almost everyone should choose 65537 here!" 
+ ) + + if key_size < 1024: + raise ValueError("key_size must be at least 1024-bits.") + + +def _modinv(e: int, m: int) -> int: + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, x2 = 1, 0 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn + return x1 % m + + +def rsa_crt_iqmp(p: int, q: int) -> int: + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent: int, p: int) -> int: + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent: int, q: int) -> int: + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. + """ + return private_exponent % (q - 1) + + +def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: + """ + Compute the RSA private_exponent (d) given the public exponent (e) + and the RSA primes p and q. + + This uses the Carmichael totient function to generate the + smallest possible working value of the private exponent. + """ + # This lambda_n is the Carmichael totient function. + # The original RSA paper uses the Euler totient function + # here: phi_n = (p - 1) * (q - 1) + # Either version of the private exponent will work, but the + # one generated by the older formulation may be larger + # than necessary. (lambda_n always divides phi_n) + # + # TODO: Replace with lcm(p - 1, q - 1) once the minimum + # supported Python version is >= 3.9. + lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) + return _modinv(e, lambda_n) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! 
+ q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 100644 index 0000000..1fe4eaf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed448, + ed25519, + rsa, + x448, + x25519, +) + +# Every asymmetric key type +PublicKeyTypes = typing.Union[ + dh.DHPublicKey, + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +PUBLIC_KEY_TYPES = PublicKeyTypes +utils.deprecated( + PUBLIC_KEY_TYPES, + __name__, + "Use PublicKeyTypes instead", + utils.DeprecatedIn40, + name="PUBLIC_KEY_TYPES", +) +# Every asymmetric key type +PrivateKeyTypes = typing.Union[ + dh.DHPrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + x25519.X25519PrivateKey, + x448.X448PrivateKey, +] +PRIVATE_KEY_TYPES = PrivateKeyTypes +utils.deprecated( + PRIVATE_KEY_TYPES, + __name__, + "Use PrivateKeyTypes instead", + utils.DeprecatedIn40, + name="PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate public key types +CertificateIssuerPrivateKeyTypes = typing.Union[ + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, +] +CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes +utils.deprecated( + CERTIFICATE_PRIVATE_KEY_TYPES, + __name__, + "Use CertificateIssuerPrivateKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate private key types +CertificateIssuerPublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, +] +CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes +utils.deprecated( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + __name__, + "Use CertificateIssuerPublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", +) +# This type removes DHPublicKey. x448/x25519 can be a public key +# but cannot be used in signing so they are allowed here. 
+CertificatePublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes +utils.deprecated( + CERTIFICATE_PUBLIC_KEY_TYPES, + __name__, + "Use CertificatePublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PUBLIC_KEY_TYPES", +) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..826b956 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.primitives import hashes + +decode_dss_signature = asn1.decode_dss_signature +encode_dss_signature = asn1.encode_dss_signature + + +class Prehashed: + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + @property + def digest_size(self) -> int: + return self._digest_size diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 0000000..0cfa36e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,109 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + + +class X25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. 
+ """ + + +X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) + + +class X25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return rust_openssl.x25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X25519PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + +X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..86086ab --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,112 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + + +class X448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. 
+ """ + + +if hasattr(rust_openssl, "x448"): + X448PublicKey.register(rust_openssl.x448.X448PublicKey) + + +class X448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X448PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + +if hasattr(rust_openssl, "x448"): + X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..10c15d0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, + AEADDecryptionContext, + AEADEncryptionContext, + Cipher, + CipherContext, +) + +__all__ = [ + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", + "BlockCipherAlgorithm", + "Cipher", + "CipherAlgorithm", + "CipherContext", +] diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dd7d036 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc new file mode 100644 index 0000000..7e6f5a2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 0000000..822c453 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..076081f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc new file mode 100644 index 0000000..8eae128 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 0000000..c8a582d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
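The Cipher class re-exported above ties an algorithm to a mode and hands back encryption/decryption contexts. A minimal round-trip sketch using AES in CBC mode (both defined later in this diff); the key, IV, and plaintext are illustrative, and CBC requires input that is a multiple of the 16-byte block size (see the padding module at the end of this section):

import os

from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import CBC

key = os.urandom(32)  # AES-256
iv = os.urandom(16)   # one block

encryptor = Cipher(AES(key), CBC(iv)).encryptor()
ct = encryptor.update(b"a secret message") + encryptor.finalize()  # exactly 16 bytes

decryptor = Cipher(AES(key), CBC(iv)).decryptor()
assert decryptor.update(ct) + decryptor.finalize() == b"a secret message"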
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = [ + "AESCCM", + "AESGCM", + "AESGCMSIV", + "AESOCB3", + "AESSIV", + "ChaCha20Poly1305", +] + +AESGCM = rust_openssl.aead.AESGCM +ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 +AESCCM = rust_openssl.aead.AESCCM +AESSIV = rust_openssl.aead.AESSIV +AESOCB3 = rust_openssl.aead.AESOCB3 +AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..1051ba3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,177 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + ARC4 as ARC4, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + CAST5 as CAST5, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + IDEA as IDEA, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + SEED as SEED, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + Blowfish as Blowfish, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + TripleDES as TripleDES, +) +from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +class AES(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class AES128(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([128]) + key_size = 128 + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + +class AES256(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([256]) + key_size = 256 + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + +class Camellia(BlockCipherAlgorithm): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +utils.deprecated( + ARC4, + __name__, + "ARC4 has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " + "will be removed from this module in 48.0.0.", + utils.DeprecatedIn43, + name="ARC4", +) + + +utils.deprecated( + TripleDES, + __name__, + "TripleDES has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " + "will be removed from this module in 48.0.0.", + utils.DeprecatedIn43, + name="TripleDES", +) + +utils.deprecated( + Blowfish, + __name__, + "Blowfish has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.Blowfish and " + "will be removed from this module in 45.0.0.", + utils.DeprecatedIn37, + name="Blowfish", +) + + +utils.deprecated( + CAST5, + __name__, + "CAST5 has been moved to " + 
"cryptography.hazmat.decrepit.ciphers.algorithms.CAST5 and " + "will be removed from this module in 45.0.0.", + utils.DeprecatedIn37, + name="CAST5", +) + + +utils.deprecated( + IDEA, + __name__, + "IDEA has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.IDEA and " + "will be removed from this module in 45.0.0.", + utils.DeprecatedIn37, + name="IDEA", +) + + +utils.deprecated( + SEED, + __name__, + "SEED has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.SEED and " + "will be removed from this module in 45.0.0.", + utils.DeprecatedIn37, + name="SEED", +) + + +class ChaCha20(CipherAlgorithm): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def __init__(self, key: bytes, nonce: bytes): + self.key = _verify_key_size(self, key) + utils._check_byteslike("nonce", nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + @property + def nonce(self) -> bytes: + return self._nonce + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SM4(BlockCipherAlgorithm): + name = "SM4" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..ebfa805 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,145 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm +from cryptography.hazmat.primitives.ciphers import modes + + +class CipherContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: bytes) -> bytes: + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data: bytes, buf: bytes) -> int: + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Returns the results of processing the final block as bytes. + """ + + @abc.abstractmethod + def reset_nonce(self, nonce: bytes) -> None: + """ + Resets the nonce for the cipher context to the provided value. + Raises an exception if it does not support reset or if the + provided nonce does not have a valid length. + """ + + +class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def authenticate_additional_data(self, data: bytes) -> None: + """ + Authenticates the provided bytes. + """ + + +class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def finalize_with_tag(self, tag: bytes) -> bytes: + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. 
+ """ + + +class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes: + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +Mode = typing.TypeVar( + "Mode", bound=typing.Optional[modes.Mode], covariant=True +) + + +class Cipher(typing.Generic[Mode]): + def __init__( + self, + algorithm: CipherAlgorithm, + mode: Mode, + backend: typing.Any = None, + ) -> None: + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + # mypy needs this assert to narrow the type from our generic + # type. Maybe it won't some time in the future. + assert isinstance(mode, modes.Mode) + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + + @typing.overload + def encryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADEncryptionContext: ... + + @typing.overload + def encryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + + return rust_openssl.ciphers.create_encryption_ctx( + self.algorithm, self.mode + ) + + @typing.overload + def decryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADDecryptionContext: ... + + @typing.overload + def decryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def decryptor(self): + return rust_openssl.ciphers.create_decryption_ctx( + self.algorithm, self.mode + ) + + +_CIPHER_TYPE = Cipher[ + typing.Union[ + modes.ModeWithNonce, + modes.ModeWithTweak, + None, + modes.ECB, + modes.ModeWithInitializationVector, + ] +] + +CipherContext.register(rust_openssl.ciphers.CipherContext) +AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) +AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..1dd2cc1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,268 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers import algorithms + + +class Mode(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def initialization_vector(self) -> bytes: + """ + The value of the initialization vector for this mode as bytes. 
+ """ + + +class ModeWithTweak(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tweak(self) -> bytes: + """ + The value of the tweak for this mode as bytes. + """ + + +class ModeWithNonce(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def nonce(self) -> bytes: + """ + The value of the nonce for this mode as bytes. + """ + + +class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes | None: + """ + The value of the tag supplied to the constructor of this mode. + """ + + +def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length( + self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm +) -> None: + iv_len = len(self.initialization_vector) + if iv_len * 8 != algorithm.block_size: + raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") + + +def _check_nonce_length( + nonce: bytes, name: str, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{name} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + if len(nonce) * 8 != algorithm.block_size: + raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") + + +def _check_iv_and_key_length( + self: ModeWithInitializationVector, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{self} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +class CBC(ModeWithInitializationVector): + name = "CBC" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class XTS(ModeWithTweak): + name = "XTS" + + def __init__(self, tweak: bytes): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + @property + def tweak(self) -> bytes: + return self._tweak + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): + raise TypeError( + "The AES128 and AES256 classes do not support XTS, please use " + "the standard AES class instead." 
+ ) + + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +class ECB(Mode): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +class OFB(ModeWithInitializationVector): + name = "OFB" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB(ModeWithInitializationVector): + name = "CFB" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB8(ModeWithInitializationVector): + name = "CFB8" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CTR(ModeWithNonce): + name = "CTR" + + def __init__(self, nonce: bytes): + utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + @property + def nonce(self) -> bytes: + return self._nonce + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + _check_nonce_length(self.nonce, self.name, algorithm) + + +class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 + _MAX_AAD_BYTES = (2**64) // 8 + + def __init__( + self, + initialization_vector: bytes, + tag: bytes | None = None, + min_tag_length: int = 16, + ): + # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive + # This is a sane limit anyway so we'll enforce it here. + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) < 8 or len(initialization_vector) > 128: + raise ValueError( + "initialization_vector must be between 8 and 128 bytes (64 " + "and 1024 bits)." + ) + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + f"Authentication tag must be {min_tag_length} bytes or " + "longer." + ) + self._tag = tag + self._min_tag_length = min_tag_length + + @property + def tag(self) -> bytes | None: + return self._tag + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + "GCM requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + block_size_bytes = algorithm.block_size // 8 + if self._tag is not None and len(self._tag) > block_size_bytes: + raise ValueError( + f"Authentication tag cannot be more than {block_size_bytes} " + "bytes." 
+ ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..2c67ce2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["CMAC"] +CMAC = rust_openssl.cmac.CMAC diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..3975c71 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import hmac + + +def bytes_eq(a: bytes, b: bytes) -> bool: + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..b819e39 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,242 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = [ + "MD5", + "SHA1", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA224", + "SHA256", + "SHA384", + "SHA512", + "SHA512_224", + "SHA512_256", + "SHAKE128", + "SHAKE256", + "SM3", + "BLAKE2b", + "BLAKE2s", + "ExtendableOutputFunction", + "Hash", + "HashAlgorithm", + "HashContext", +] + + +class HashAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @property + @abc.abstractmethod + def digest_size(self) -> int: + """ + The size of the resulting digest in bytes. + """ + + @property + @abc.abstractmethod + def block_size(self) -> int | None: + """ + The internal block size of the hash function, or None if the hash + function does not use blocks internally (e.g. SHA3). + """ + + +class HashContext(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm(self) -> HashAlgorithm: + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data: bytes) -> None: + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self) -> HashContext: + """ + Return a HashContext that is a copy of the current context. 
+ """ + + +Hash = rust_openssl.hashes.Hash +HashContext.register(Hash) + + +class ExtendableOutputFunction(metaclass=abc.ABCMeta): + """ + An interface for extendable output functions. + """ + + +class SHA1(HashAlgorithm): + name = "sha1" + digest_size = 20 + block_size = 64 + + +class SHA512_224(HashAlgorithm): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +class SHA512_256(HashAlgorithm): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +class SHA224(HashAlgorithm): + name = "sha224" + digest_size = 28 + block_size = 64 + + +class SHA256(HashAlgorithm): + name = "sha256" + digest_size = 32 + block_size = 64 + + +class SHA384(HashAlgorithm): + name = "sha384" + digest_size = 48 + block_size = 128 + + +class SHA512(HashAlgorithm): + name = "sha512" + digest_size = 64 + block_size = 128 + + +class SHA3_224(HashAlgorithm): # noqa: N801 + name = "sha3-224" + digest_size = 28 + block_size = None + + +class SHA3_256(HashAlgorithm): # noqa: N801 + name = "sha3-256" + digest_size = 32 + block_size = None + + +class SHA3_384(HashAlgorithm): # noqa: N801 + name = "sha3-384" + digest_size = 48 + block_size = None + + +class SHA3_512(HashAlgorithm): # noqa: N801 + name = "sha3-512" + digest_size = 64 + block_size = None + + +class SHAKE128(HashAlgorithm, ExtendableOutputFunction): + name = "shake128" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SHAKE256(HashAlgorithm, ExtendableOutputFunction): + name = "shake256" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class MD5(HashAlgorithm): + name = "md5" + digest_size = 16 + block_size = 64 + + +class BLAKE2b(HashAlgorithm): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size: int): + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class BLAKE2s(HashAlgorithm): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size: int): + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SM3(HashAlgorithm): + name = "sm3" + digest_size = 32 + block_size = 64 diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..a9442d5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import hashes + +__all__ = ["HMAC"] + +HMAC = rust_openssl.hmac.HMAC +hashes.HashContext.register(HMAC) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..79bb459 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + + +class KeyDerivationFunction(metaclass=abc.ABCMeta): + @abc.abstractmethod + def derive(self, key_material: bytes) -> bytes: + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material: bytes, expected_key: bytes) -> None: + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..91e4456 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc new file mode 100644 index 0000000..cb7d75b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc new file mode 100644 index 0000000..d821c7d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc new file mode 100644 index 0000000..b6e031f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc new file mode 100644 index 0000000..dfad4e8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc new file mode 100644 index 0000000..19f3b60 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc new file mode 100644 index 0000000..57b566c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..96d9d4c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,124 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +def _common_args_checks( + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, +) -> None: + max_length = algorithm.digest_size * (2**32 - 1) + if length > max_length: + raise ValueError(f"Cannot derive keys larger than {max_length} bits.") + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive( + key_material: bytes, + length: int, + auxfn: typing.Callable[[], hashes.HashContext], + otherinfo: bytes, +) -> bytes: + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while length > outlen: + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +class ConcatKDFHash(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + self._used = False + + def _hash(self) -> hashes.Hash: + return hashes.Hash(self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hash, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class ConcatKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes | None, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = 
algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + if algorithm.block_size is None: + raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._used = False + + def _hmac(self) -> hmac.HMAC: + return hmac.HMAC(self._salt, self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hmac, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..ee562d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,101 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class HKDF(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes | None, + info: bytes | None, + backend: typing.Any = None, + ): + self._algorithm = algorithm + + if salt is None: + salt = b"\x00" * self._algorithm.digest_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._hkdf_expand = HKDFExpand(self._algorithm, length, info) + + def _extract(self, key_material: bytes) -> bytes: + h = hmac.HMAC(self._salt, self._algorithm) + h.update(key_material) + return h.finalize() + + def derive(self, key_material: bytes) -> bytes: + utils._check_byteslike("key_material", key_material) + return self._hkdf_expand.derive(self._extract(key_material)) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class HKDFExpand(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + info: bytes | None, + backend: typing.Any = None, + ): + self._algorithm = algorithm + + max_length = 255 * algorithm.digest_size + + if length > max_length: + raise ValueError( + f"Cannot derive keys larger than {max_length} octets." 
+ ) + + self._length = length + + if info is None: + info = b"" + else: + utils._check_bytes("info", info) + + self._info = info + + self._used = False + + def _expand(self, key_material: bytes) -> bytes: + output = [b""] + counter = 1 + + while self._algorithm.digest_size * (len(output) - 1) < self._length: + h = hmac.HMAC(key_material, self._algorithm) + h.update(output[-1]) + h.update(self._info) + h.update(bytes([counter])) + output.append(h.finalize()) + counter += 1 + + return b"".join(output)[: self._length] + + def derive(self, key_material: bytes) -> bytes: + utils._check_byteslike("key_material", key_material) + if self._used: + raise AlreadyFinalized + + self._used = True + return self._expand(key_material) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 0000000..802b484 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,302 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import ( + ciphers, + cmac, + constant_time, + hashes, + hmac, +) +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(utils.Enum): + CounterMode = "ctr" + + +class CounterLocation(utils.Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + MiddleFixed = "middle_fixed" + + +class _KBKDFDeriver: + def __init__( + self, + prf: typing.Callable, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + break_location: int | None, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + ): + assert callable(prf) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if break_location is None and location is CounterLocation.MiddleFixed: + raise ValueError("Please specify a break_location") + + if ( + break_location is not None + and location != CounterLocation.MiddleFixed + ): + raise ValueError( + "break_location is ignored when location is not" + " CounterLocation.MiddleFixed" + ) + + if break_location is not None and not isinstance(break_location, int): + raise TypeError("break_location must be an integer") + + if break_location is not None and break_location < 0: + raise ValueError("break_location must be a positive integer") + + if (label or context) and fixed: + raise ValueError( + "When supplying fixed data, label and context are ignored." 
+ ) + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if llen == 0: + raise ValueError("llen must be non-zero") + + if label is None: + label = b"" + + if context is None: + context = b"" + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._prf = prf + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._break_location = break_location + self._label = label + self._context = context + self._used = False + self._fixed_data = fixed + + @staticmethod + def _valid_byte_length(value: int) -> bool: + if not isinstance(value, int): + raise TypeError("value must be of type int") + + value_bin = utils.int_to_bytes(1, value) + if not 1 <= len(value_bin) <= 4: + return False + return True + + def derive(self, key_material: bytes, prf_output_size: int) -> bytes: + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // prf_output_size) + + output = [b""] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. + r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError("There are too many iterations.") + + fixed = self._generate_fixed_input() + + if self._location == CounterLocation.BeforeFixed: + data_before_ctr = b"" + data_after_ctr = fixed + elif self._location == CounterLocation.AfterFixed: + data_before_ctr = fixed + data_after_ctr = b"" + else: + if isinstance( + self._break_location, int + ) and self._break_location > len(fixed): + raise ValueError("break_location offset > len(fixed)") + data_before_ctr = fixed[: self._break_location] + data_after_ctr = fixed[self._break_location :] + + for i in range(1, rounds + 1): + h = self._prf(key_material) + + counter = utils.int_to_bytes(i, self._rlen) + input_data = data_before_ctr + counter + data_after_ctr + + h.update(input_data) + + output.append(h.finalize()) + + return b"".join(output)[: self._length] + + def _generate_fixed_input(self) -> bytes: + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + +class KBKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH, + 
) + + self._algorithm = algorithm + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, key_material: bytes) -> hmac.HMAC: + return hmac.HMAC(key_material, self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + return self._deriver.derive(key_material, self._algorithm.digest_size) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class KBKDFCMAC(KeyDerivationFunction): + def __init__( + self, + algorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not issubclass( + algorithm, ciphers.BlockCipherAlgorithm + ) or not issubclass(algorithm, ciphers.CipherAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._algorithm = algorithm + self._cipher: ciphers.BlockCipherAlgorithm | None = None + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, _: bytes) -> cmac.CMAC: + assert self._cipher is not None + + return cmac.CMAC(self._cipher) + + def derive(self, key_material: bytes) -> bytes: + self._cipher = self._algorithm(key_material) + + assert self._cipher is not None + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.cmac_algorithm_supported(self._cipher): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + return self._deriver.derive(key_material, self._cipher.block_size // 8) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..82689eb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,62 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class PBKDF2HMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + f"{algorithm.name} is not supported for PBKDF2.", + _Reasons.UNSUPPORTED_HASH, + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + return rust_openssl.kdf.derive_pbkdf2_hmac( + key_material, + self._algorithm, + self._salt, + self._iterations, + self._length, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 0000000..05a4f67 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,80 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
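PBKDF2HMAC above is single-use: derive() (or verify(), which calls it) finalizes the instance. A minimal sketch; the salt, password, and iteration count are illustrative:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000
)
key = kdf.derive(b"my great password")

# Verification needs a fresh instance, since the first one is now finalized.
PBKDF2HMAC(
    algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000
).verify(b"my great password", key)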
+ +from __future__ import annotations + +import sys +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, +) +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +# This is used by the scrypt tests to skip tests that require more memory +# than the MEM_LIMIT +_MEM_LIMIT = sys.maxsize // 2 + + +class Scrypt(KeyDerivationFunction): + def __init__( + self, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.scrypt_supported(): + raise UnsupportedAlgorithm( + "This version of OpenSSL does not support scrypt" + ) + self._length = length + utils._check_bytes("salt", salt) + if n < 2 or (n & (n - 1)) != 0: + raise ValueError("n must be greater than 1 and be a power of 2.") + + if r < 1: + raise ValueError("r must be greater than or equal to 1.") + + if p < 1: + raise ValueError("p must be greater than or equal to 1.") + + self._used = False + self._salt = salt + self._n = n + self._r = r + self._p = p + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized("Scrypt instances can only be used once.") + self._used = True + + utils._check_byteslike("key_material", key_material) + + return rust_openssl.kdf.derive_scrypt( + key_material, + self._salt, + self._n, + self._r, + self._p, + _MEM_LIMIT, + self._length, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..6e38366 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
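Scrypt above validates that n is a power of two greater than 1 and that r and p are at least 1 before handing off to the Rust backend. A minimal sketch with illustrative cost parameters:

import os

from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
key = kdf.derive(b"my great password")

# Like the other KDFs in this package, instances are single-use.
Scrypt(salt=salt, length=32, n=2**14, r=8, p=1).verify(b"my great password", key)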
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+    return n.to_bytes(length=4, byteorder="big")
+
+
+class X963KDF(KeyDerivationFunction):
+    def __init__(
+        self,
+        algorithm: hashes.HashAlgorithm,
+        length: int,
+        sharedinfo: bytes | None,
+        backend: typing.Any = None,
+    ):
+        max_len = algorithm.digest_size * (2**32 - 1)
+        if length > max_len:
+            raise ValueError(f"Cannot derive keys larger than {max_len} bits.")
+        if sharedinfo is not None:
+            utils._check_bytes("sharedinfo", sharedinfo)
+
+        self._algorithm = algorithm
+        self._length = length
+        self._sharedinfo = sharedinfo
+        self._used = False
+
+    def derive(self, key_material: bytes) -> bytes:
+        if self._used:
+            raise AlreadyFinalized
+        self._used = True
+        utils._check_byteslike("key_material", key_material)
+        output = [b""]
+        outlen = 0
+        counter = 1
+
+        while self._length > outlen:
+            h = hashes.Hash(self._algorithm)
+            h.update(key_material)
+            h.update(_int_to_u32be(counter))
+            if self._sharedinfo is not None:
+                h.update(self._sharedinfo)
+            output.append(h.finalize())
+            outlen += len(output[-1])
+            counter += 1
+
+        return b"".join(output)[: self._length]
+
+    def verify(self, key_material: bytes, expected_key: bytes) -> None:
+        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+            raise InvalidKey
diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 0000000..b93d87d
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,177 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
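X963KDF above hashes key_material || counter || sharedinfo with an incrementing 32-bit counter until enough output has accumulated, so it is typically used to turn an ECDH shared secret into symmetric keys. A minimal sketch; the sharedinfo and input secret are illustrative:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

xkdf = X963KDF(
    algorithm=hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 example"
)
key = xkdf.derive(b"shared secret from a key exchange")
assert len(key) == 32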
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> bytes: + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + a = ( + int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r) + + +def _unwrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> tuple[bytes, list[bytes]]: + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + atr = ( + int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( + length=4, byteorder="big" + ) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r) + + +def aes_key_unwrap_with_padding( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + out = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = out[:8] + data = 
out[8:] + n = 1 + else: + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + mli = int.from_bytes(a[4:], byteorder="big") + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") + or not 8 * (n - 1) < mli <= 8 * n + or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..d1ca775 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,204 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings._rust import ( + PKCS7PaddingContext, + check_ansix923_padding, + check_pkcs7_padding, +) + + +class PaddingContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: bytes) -> bytes: + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size: int) -> None: + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +def _byte_padding_update( + buffer_: bytes | None, data: bytes, block_size: int +) -> tuple[bytes, bytes]: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_byteslike("data", data) + + buffer_ += bytes(data) + + finished_blocks = len(buffer_) // (block_size // 8) + + result = buffer_[: finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8) :] + + return buffer_, result + + +def _byte_padding_pad( + buffer_: bytes | None, + block_size: int, + paddingfn: typing.Callable[[int], bytes], +) -> bytes: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + pad_size = block_size // 8 - len(buffer_) + return buffer_ + paddingfn(pad_size) + + +def _byte_unpadding_update( + buffer_: bytes | None, data: bytes, block_size: int +) -> tuple[bytes, bytes]: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_byteslike("data", data) + + buffer_ += bytes(data) + + finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0) + + result = buffer_[: finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8) :] + + return buffer_, result + + +def _byte_unpadding_check( + buffer_: bytes | None, + block_size: int, + checkfn: typing.Callable[[bytes], int], +) -> bytes: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if len(buffer_) != block_size // 8: + raise ValueError("Invalid padding bytes.") + + valid = checkfn(buffer_) + + if not valid: + raise ValueError("Invalid padding bytes.") + + pad_size = buffer_[-1] + return buffer_[:-pad_size] + + +class PKCS7: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return PKCS7PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return _PKCS7UnpaddingContext(self.block_size) + + +class _PKCS7UnpaddingContext(PaddingContext): + _buffer: bytes | None + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size + ) + return result + + def finalize(self) -> bytes: + result = _byte_unpadding_check( + self._buffer, self.block_size, check_pkcs7_padding + ) + self._buffer = None + return result + + +PaddingContext.register(PKCS7PaddingContext) + + +class ANSIX923: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return _ANSIX923PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return _ANSIX923UnpaddingContext(self.block_size) + + +class _ANSIX923PaddingContext(PaddingContext): + _buffer: bytes | None + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size + ) + return result + + def _padding(self, size: 
int) -> bytes: + return bytes([0]) * (size - 1) + bytes([size]) + + def finalize(self) -> bytes: + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding + ) + self._buffer = None + return result + + +class _ANSIX923UnpaddingContext(PaddingContext): + _buffer: bytes | None + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size + ) + return result + + def finalize(self) -> bytes: + result = _byte_unpadding_check( + self._buffer, + self.block_size, + check_ansix923_padding, + ) + self._buffer = None + return result diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..7f5a77a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["Poly1305"] + +Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..07b2264 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,63 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
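A minimal round-trip sketch for the padding contexts defined above (PKCS7 shown; ANSIX923 is used the same way). The 128-bit block size and sample message are illustrative assumptions:

    from cryptography.hazmat.primitives import padding

    padder = padding.PKCS7(128).padder()
    padded = padder.update(b"short message") + padder.finalize()
    assert len(padded) % 16 == 0  # output is always whole 16-byte blocks

    unpadder = padding.PKCS7(128).unpadder()
    data = unpadder.update(padded) + unpadder.finalize()
    assert data == b"short message"  # finalize() validates the padding bytes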
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._serialization import ( + BestAvailableEncryption, + Encoding, + KeySerializationEncryption, + NoEncryption, + ParameterFormat, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.base import ( + load_der_parameters, + load_der_private_key, + load_der_public_key, + load_pem_parameters, + load_pem_private_key, + load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + SSHCertificate, + SSHCertificateBuilder, + SSHCertificateType, + SSHCertPrivateKeyTypes, + SSHCertPublicKeyTypes, + SSHPrivateKeyTypes, + SSHPublicKeyTypes, + load_ssh_private_key, + load_ssh_public_identity, + load_ssh_public_key, +) + +__all__ = [ + "BestAvailableEncryption", + "Encoding", + "KeySerializationEncryption", + "NoEncryption", + "ParameterFormat", + "PrivateFormat", + "PublicFormat", + "SSHCertPrivateKeyTypes", + "SSHCertPublicKeyTypes", + "SSHCertificate", + "SSHCertificateBuilder", + "SSHCertificateType", + "SSHPrivateKeyTypes", + "SSHPublicKeyTypes", + "_KeySerializationEncryption", + "load_der_parameters", + "load_der_private_key", + "load_der_public_key", + "load_pem_parameters", + "load_pem_private_key", + "load_pem_public_key", + "load_ssh_private_key", + "load_ssh_public_identity", + "load_ssh_public_key", +] diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..81c2149 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..aaa2356 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc new file mode 100644 index 0000000..094a6f2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc new file mode 100644 index 0000000..4e63fd1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc new file mode 100644 index 0000000..e2b447d Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..e7c998b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +load_pem_private_key = rust_openssl.keys.load_pem_private_key +load_der_private_key = rust_openssl.keys.load_der_private_key + +load_pem_public_key = rust_openssl.keys.load_pem_public_key +load_der_public_key = rust_openssl.keys.load_der_public_key + +load_pem_parameters = rust_openssl.dh.from_pem_parameters +load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..549e1f9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,156 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import x509 +from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives._serialization import PBES as PBES +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, +) +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes + +__all__ = [ + "PBES", + "PKCS12Certificate", + "PKCS12KeyAndCertificates", + "PKCS12PrivateKeyTypes", + "load_key_and_certificates", + "load_pkcs12", + "serialize_key_and_certificates", +] + +PKCS12PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, +] + + +PKCS12Certificate = rust_pkcs12.PKCS12Certificate + + +class PKCS12KeyAndCertificates: + def __init__( + self, + key: PrivateKeyTypes | None, + cert: PKCS12Certificate | None, + additional_certs: list[PKCS12Certificate], + ): + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." 
+            )
+        if cert is not None and not isinstance(cert, PKCS12Certificate):
+            raise TypeError("cert must be a PKCS12Certificate object or None")
+        if not all(
+            isinstance(add_cert, PKCS12Certificate)
+            for add_cert in additional_certs
+        ):
+            raise TypeError(
+                "all values in additional_certs must be PKCS12Certificate"
+                " objects"
+            )
+        self._key = key
+        self._cert = cert
+        self._additional_certs = additional_certs
+
+    @property
+    def key(self) -> PrivateKeyTypes | None:
+        return self._key
+
+    @property
+    def cert(self) -> PKCS12Certificate | None:
+        return self._cert
+
+    @property
+    def additional_certs(self) -> list[PKCS12Certificate]:
+        return self._additional_certs
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PKCS12KeyAndCertificates):
+            return NotImplemented
+
+        return (
+            self.key == other.key
+            and self.cert == other.cert
+            and self.additional_certs == other.additional_certs
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.key, self.cert, tuple(self.additional_certs)))
+
+    def __repr__(self) -> str:
+        fmt = (
+            "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>"
+        )
+        return fmt.format(self.key, self.cert, self.additional_certs)
+
+
+load_key_and_certificates = rust_pkcs12.load_key_and_certificates
+load_pkcs12 = rust_pkcs12.load_pkcs12
+
+
+_PKCS12CATypes = typing.Union[
+    x509.Certificate,
+    PKCS12Certificate,
+]
+
+
+def serialize_key_and_certificates(
+    name: bytes | None,
+    key: PKCS12PrivateKeyTypes | None,
+    cert: x509.Certificate | None,
+    cas: typing.Iterable[_PKCS12CATypes] | None,
+    encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+    if key is not None and not isinstance(
+        key,
+        (
+            rsa.RSAPrivateKey,
+            dsa.DSAPrivateKey,
+            ec.EllipticCurvePrivateKey,
+            ed25519.Ed25519PrivateKey,
+            ed448.Ed448PrivateKey,
+        ),
+    ):
+        raise TypeError(
+            "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+            " private key, or None."
+        )
+
+    if not isinstance(
+        encryption_algorithm, serialization.KeySerializationEncryption
+    ):
+        raise TypeError(
+            "Key encryption algorithm must be a "
+            "KeySerializationEncryption instance"
+        )
+
+    if key is None and cert is None and not cas:
+        raise ValueError("You must supply at least one of key, cert, or cas")
+
+    return rust_pkcs12.serialize_key_and_certificates(
+        name, key, cert, cas, encryption_algorithm
+    )
diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 0000000..97ea9db
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,336 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
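To illustrate the PKCS#12 serialization API above, a hedged sketch; key, cert, and ca_certs are hypothetical objects loaded elsewhere, and the passphrase is a placeholder:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.serialization import pkcs12

    # key: an RSA/DSA/EC/Ed25519/Ed448 private key; cert: its x509.Certificate;
    # ca_certs: a list of additional certificates (all hypothetical inputs).
    p12 = pkcs12.serialize_key_and_certificates(
        name=b"example",
        key=key,
        cert=cert,
        cas=ca_certs,
        encryption_algorithm=serialization.BestAvailableEncryption(b"passphrase"),
    )

    # Round trip: returns (private key, certificate, additional certificates).
    loaded_key, loaded_cert, extra = pkcs12.load_key_and_certificates(
        p12, b"passphrase"
    )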
+ +from __future__ import annotations + +import email.base64mime +import email.generator +import email.message +import email.policy +import io +import typing + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.utils import _check_byteslike + +load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates + +load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates + +serialize_certificates = rust_pkcs7.serialize_certificates + +PKCS7HashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +] + +PKCS7PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey +] + + +class PKCS7Options(utils.Enum): + Text = "Add text/plain MIME type" + Binary = "Don't translate input data into canonical MIME format" + DetachedSignature = "Don't embed data in the PKCS7 structure" + NoCapabilities = "Don't embed SMIME capabilities" + NoAttributes = "Don't embed authenticatedAttributes" + NoCerts = "Don't embed signer certificate" + + +class PKCS7SignatureBuilder: + def __init__( + self, + data: bytes | None = None, + signers: list[ + tuple[ + x509.Certificate, + PKCS7PrivateKeyTypes, + PKCS7HashTypes, + padding.PSS | padding.PKCS1v15 | None, + ] + ] = [], + additional_certs: list[x509.Certificate] = [], + ): + self._data = data + self._signers = signers + self._additional_certs = additional_certs + + def set_data(self, data: bytes) -> PKCS7SignatureBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7SignatureBuilder(data, self._signers) + + def add_signer( + self, + certificate: x509.Certificate, + private_key: PKCS7PrivateKeyTypes, + hash_algorithm: PKCS7HashTypes, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> PKCS7SignatureBuilder: + if not isinstance( + hash_algorithm, + ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ): + raise TypeError( + "hash_algorithm must be one of hashes.SHA224, " + "SHA256, SHA384, or SHA512" + ) + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance( + private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) + ): + raise TypeError("Only RSA & EC keys are supported at this time.") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return PKCS7SignatureBuilder( + self._data, + [ + *self._signers, + (certificate, private_key, hash_algorithm, rsa_padding), + ], + ) + + def add_certificate( + self, certificate: x509.Certificate + ) -> PKCS7SignatureBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + return PKCS7SignatureBuilder( + self._data, self._signers, [*self._additional_certs, certificate] + ) + + def sign( + self, + encoding: serialization.Encoding, + options: typing.Iterable[PKCS7Options], + backend: typing.Any = None, + ) -> bytes: + if len(self._signers) == 0: + raise ValueError("Must have at least one signer") + if 
self._data is None: + raise ValueError("You must add data to sign") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Text is a meaningless option unless it is accompanied by + # DetachedSignature + if ( + PKCS7Options.Text in options + and PKCS7Options.DetachedSignature not in options + ): + raise ValueError( + "When passing the Text option you must also pass " + "DetachedSignature" + ) + + if PKCS7Options.Text in options and encoding in ( + serialization.Encoding.DER, + serialization.Encoding.PEM, + ): + raise ValueError( + "The Text option is only available for SMIME serialization" + ) + + # No attributes implies no capabilities so we'll error if you try to + # pass both. + if ( + PKCS7Options.NoAttributes in options + and PKCS7Options.NoCapabilities in options + ): + raise ValueError( + "NoAttributes is a superset of NoCapabilities. Do not pass " + "both values." + ) + + return rust_pkcs7.sign_and_serialize(self, encoding, options) + + +class PKCS7EnvelopeBuilder: + def __init__( + self, + *, + _data: bytes | None = None, + _recipients: list[x509.Certificate] | None = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): + raise UnsupportedAlgorithm( + "RSA with PKCS1 v1.5 padding is not supported by this version" + " of OpenSSL.", + _Reasons.UNSUPPORTED_PADDING, + ) + self._data = _data + self._recipients = _recipients if _recipients is not None else [] + + def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7EnvelopeBuilder(_data=data, _recipients=self._recipients) + + def add_recipient( + self, + certificate: x509.Certificate, + ) -> PKCS7EnvelopeBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance(certificate.public_key(), rsa.RSAPublicKey): + raise TypeError("Only RSA keys are supported at this time.") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=[ + *self._recipients, + certificate, + ], + ) + + def encrypt( + self, + encoding: serialization.Encoding, + options: typing.Iterable[PKCS7Options], + ) -> bytes: + if len(self._recipients) == 0: + raise ValueError("Must have at least one recipient") + if self._data is None: + raise ValueError("You must add data to encrypt") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Only allow options that make sense for encryption + if any( + opt not in [PKCS7Options.Text, PKCS7Options.Binary] + for opt in options + ): + raise ValueError( + "Only the following options are supported for encryption: " + "Text, Binary" + ) + elif PKCS7Options.Text in options and PKCS7Options.Binary in options: + # OpenSSL accepts both options at the same time, but ignores Text. 
+ # We fail defensively to avoid unexpected outputs. + raise ValueError( + "Cannot use Binary and Text options at the same time" + ) + + return rust_pkcs7.encrypt_and_serialize(self, encoding, options) + + +def _smime_signed_encode( + data: bytes, signature: bytes, micalg: str, text_mode: bool +) -> bytes: + # This function works pretty hard to replicate what OpenSSL does + # precisely. For good and for ill. + + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header( + "Content-Type", + "multipart/signed", + protocol="application/x-pkcs7-signature", + micalg=micalg, + ) + + m.preamble = "This is an S/MIME signed message\n" + + msg_part = OpenSSLMimePart() + msg_part.set_payload(data) + if text_mode: + msg_part.add_header("Content-Type", "text/plain") + m.attach(msg_part) + + sig_part = email.message.MIMEPart() + sig_part.add_header( + "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" + ) + sig_part.add_header("Content-Transfer-Encoding", "base64") + sig_part.add_header( + "Content-Disposition", "attachment", filename="smime.p7s" + ) + sig_part.set_payload( + email.base64mime.body_encode(signature, maxlinelen=65) + ) + del sig_part["MIME-Version"] + m.attach(sig_part) + + fp = io.BytesIO() + g = email.generator.BytesGenerator( + fp, + maxheaderlen=0, + mangle_from_=False, + policy=m.policy.clone(linesep="\r\n"), + ) + g.flatten(m) + return fp.getvalue() + + +def _smime_enveloped_encode(data: bytes) -> bytes: + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header("Content-Disposition", "attachment", filename="smime.p7m") + m.add_header( + "Content-Type", + "application/pkcs7-mime", + smime_type="enveloped-data", + name="smime.p7m", + ) + m.add_header("Content-Transfer-Encoding", "base64") + + m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) + + return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) + + +class OpenSSLMimePart(email.message.MIMEPart): + # A MIMEPart subclass that replicates OpenSSL's behavior of not including + # a newline if there are no headers. + def _write_headers(self, generator) -> None: + if list(self.raw_items()): + generator._write_headers(self) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 0000000..c01afb0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,1569 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
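A hedged sketch of the PKCS7SignatureBuilder flow above, producing a detached S/MIME signature; certificate and private_key are hypothetical objects loaded elsewhere:

    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.serialization import pkcs7

    signature = (
        pkcs7.PKCS7SignatureBuilder()
        .set_data(b"data to sign")
        .add_signer(certificate, private_key, hashes.SHA256())
        # DetachedSignature keeps the data out of the PKCS7 structure itself.
        .sign(serialization.Encoding.SMIME, [pkcs7.PKCS7Options.DetachedSignature])
    )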
+ +from __future__ import annotations + +import binascii +import enum +import os +import re +import typing +import warnings +from base64 import encodebytes as _base64_encode +from dataclasses import dataclass + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + padding, + rsa, +) +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.ciphers import ( + AEADDecryptionContext, + Cipher, + algorithms, + modes, +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, + KeySerializationEncryption, + NoEncryption, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) + +try: + from bcrypt import kdf as _bcrypt_kdf + + _bcrypt_supported = True +except ImportError: + _bcrypt_supported = False + + def _bcrypt_kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, + ) -> bytes: + raise UnsupportedAlgorithm("Need bcrypt module") + + +_SSH_ED25519 = b"ssh-ed25519" +_SSH_RSA = b"ssh-rsa" +_SSH_DSA = b"ssh-dss" +_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" +_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" +_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" +_CERT_SUFFIX = b"-cert-v01@openssh.com" + +# U2F application string suffixed pubkey +_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" +_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" + +# These are not key types, only algorithms, so they cannot appear +# as a public key type +_SSH_RSA_SHA256 = b"rsa-sha2-256" +_SSH_RSA_SHA512 = b"rsa-sha2-512" + +_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") +_SK_MAGIC = b"openssh-key-v1\0" +_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" +_SK_END = b"-----END OPENSSH PRIVATE KEY-----" +_BCRYPT = b"bcrypt" +_NONE = b"none" +_DEFAULT_CIPHER = b"aes256-ctr" +_DEFAULT_ROUNDS = 16 + +# re is only way to work on bytes-like data +_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) + +# padding for max blocksize +_PADDING = memoryview(bytearray(range(1, 1 + 16))) + + +@dataclass +class _SSHCipher: + alg: type[algorithms.AES] + key_len: int + mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] + block_len: int + iv_len: int + tag_len: int | None + is_aead: bool + + +# ciphers that are actually used in key wrapping +_SSH_CIPHERS: dict[bytes, _SSHCipher] = { + b"aes256-ctr": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CTR, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-cbc": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CBC, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-gcm@openssh.com": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.GCM, + block_len=16, + iv_len=12, + tag_len=16, + is_aead=True, + ), +} + +# map local curve name to key type +_ECDSA_KEY_TYPE = { + "secp256r1": _ECDSA_NISTP256, + "secp384r1": _ECDSA_NISTP384, + "secp521r1": _ECDSA_NISTP521, +} + + +def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: + if isinstance(key, ec.EllipticCurvePrivateKey): + key_type = _ecdsa_key_type(key.public_key()) + elif isinstance(key, ec.EllipticCurvePublicKey): + key_type = _ecdsa_key_type(key) + elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + key_type = _SSH_RSA + elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): + key_type = _SSH_DSA + 
elif isinstance( + key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) + ): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + return key_type + + +def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return SSH key_type and curve_name for private key.""" + curve = public_key.curve + if curve.name not in _ECDSA_KEY_TYPE: + raise ValueError( + f"Unsupported curve for ssh private key: {curve.name!r}" + ) + return _ECDSA_KEY_TYPE[curve.name] + + +def _ssh_pem_encode( + data: bytes, + prefix: bytes = _SK_START + b"\n", + suffix: bytes = _SK_END + b"\n", +) -> bytes: + return b"".join([prefix, _base64_encode(data), suffix]) + + +def _check_block_size(data: bytes, block_len: int) -> None: + """Require data to be full blocks""" + if not data or len(data) % block_len != 0: + raise ValueError("Corrupt data: missing padding") + + +def _check_empty(data: bytes) -> None: + """All data should have been parsed.""" + if data: + raise ValueError("Corrupt data: unparsed data") + + +def _init_cipher( + ciphername: bytes, + password: bytes | None, + salt: bytes, + rounds: int, +) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: + """Generate key + iv and return cipher.""" + if not password: + raise ValueError("Key is password-protected.") + + ciph = _SSH_CIPHERS[ciphername] + seed = _bcrypt_kdf( + password, salt, ciph.key_len + ciph.iv_len, rounds, True + ) + return Cipher( + ciph.alg(seed[: ciph.key_len]), + ciph.mode(seed[ciph.key_len :]), + ) + + +def _get_u32(data: memoryview) -> tuple[int, memoryview]: + """Uint32""" + if len(data) < 4: + raise ValueError("Invalid data") + return int.from_bytes(data[:4], byteorder="big"), data[4:] + + +def _get_u64(data: memoryview) -> tuple[int, memoryview]: + """Uint64""" + if len(data) < 8: + raise ValueError("Invalid data") + return int.from_bytes(data[:8], byteorder="big"), data[8:] + + +def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: + """Bytes with u32 length prefix""" + n, data = _get_u32(data) + if n > len(data): + raise ValueError("Invalid data") + return data[:n], data[n:] + + +def _get_mpint(data: memoryview) -> tuple[int, memoryview]: + """Big integer.""" + val, data = _get_sshstr(data) + if val and val[0] > 0x7F: + raise ValueError("Invalid data") + return int.from_bytes(val, "big"), data + + +def _to_mpint(val: int) -> bytes: + """Storage format for signed bigint.""" + if val < 0: + raise ValueError("negative mpint not allowed") + if not val: + return b"" + nbytes = (val.bit_length() + 8) // 8 + return utils.int_to_bytes(val, nbytes) + + +class _FragList: + """Build recursive structure without data copy.""" + + flist: list[bytes] + + def __init__(self, init: list[bytes] | None = None) -> None: + self.flist = [] + if init: + self.flist.extend(init) + + def put_raw(self, val: bytes) -> None: + """Add plain bytes""" + self.flist.append(val) + + def put_u32(self, val: int) -> None: + """Big-endian uint32""" + self.flist.append(val.to_bytes(length=4, byteorder="big")) + + def put_u64(self, val: int) -> None: + """Big-endian uint64""" + self.flist.append(val.to_bytes(length=8, byteorder="big")) + + def put_sshstr(self, val: bytes | _FragList) -> None: + """Bytes prefixed with u32 length""" + if isinstance(val, (bytes, memoryview, bytearray)): + self.put_u32(len(val)) + self.flist.append(val) + else: + self.put_u32(val.size()) + self.flist.extend(val.flist) + + def put_mpint(self, val: int) -> None: + """Big-endian bigint prefixed with u32 length""" + self.put_sshstr(_to_mpint(val)) + + 
def size(self) -> int: + """Current number of bytes""" + return sum(map(len, self.flist)) + + def render(self, dstbuf: memoryview, pos: int = 0) -> int: + """Write into bytearray""" + for frag in self.flist: + flen = len(frag) + start, pos = pos, pos + flen + dstbuf[start:pos] = frag + return pos + + def tobytes(self) -> bytes: + """Return as bytes""" + buf = memoryview(bytearray(self.size())) + self.render(buf) + return buf.tobytes() + + +class _SSHFormatRSA: + """Format for RSA keys. + + Public: + mpint e, n + Private: + mpint n, e, d, iqmp, p, q + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[int, int], memoryview]: + """RSA public fields""" + e, data = _get_mpint(data) + n, data = _get_mpint(data) + return (e, n), data + + def load_public( + self, data: memoryview + ) -> tuple[rsa.RSAPublicKey, memoryview]: + """Make RSA public key from data.""" + (e, n), data = self.get_public(data) + public_numbers = rsa.RSAPublicNumbers(e, n) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> tuple[rsa.RSAPrivateKey, memoryview]: + """Make RSA private key from data.""" + n, data = _get_mpint(data) + e, data = _get_mpint(data) + d, data = _get_mpint(data) + iqmp, data = _get_mpint(data) + p, data = _get_mpint(data) + q, data = _get_mpint(data) + + if (e, n) != pubfields: + raise ValueError("Corrupt data: rsa field mismatch") + dmp1 = rsa.rsa_crt_dmp1(d, p) + dmq1 = rsa.rsa_crt_dmq1(d, q) + public_numbers = rsa.RSAPublicNumbers(e, n) + private_numbers = rsa.RSAPrivateNumbers( + p, q, d, dmp1, dmq1, iqmp, public_numbers + ) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: rsa.RSAPublicKey, f_pub: _FragList + ) -> None: + """Write RSA public key""" + pubn = public_key.public_numbers() + f_pub.put_mpint(pubn.e) + f_pub.put_mpint(pubn.n) + + def encode_private( + self, private_key: rsa.RSAPrivateKey, f_priv: _FragList + ) -> None: + """Write RSA private key""" + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + f_priv.put_mpint(public_numbers.n) + f_priv.put_mpint(public_numbers.e) + + f_priv.put_mpint(private_numbers.d) + f_priv.put_mpint(private_numbers.iqmp) + f_priv.put_mpint(private_numbers.p) + f_priv.put_mpint(private_numbers.q) + + +class _SSHFormatDSA: + """Format for DSA keys. 
+ + Public: + mpint p, q, g, y + Private: + mpint p, q, g, y, x + """ + + def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: + """DSA public fields""" + p, data = _get_mpint(data) + q, data = _get_mpint(data) + g, data = _get_mpint(data) + y, data = _get_mpint(data) + return (p, q, g, y), data + + def load_public( + self, data: memoryview + ) -> tuple[dsa.DSAPublicKey, memoryview]: + """Make DSA public key from data.""" + (p, q, g, y), data = self.get_public(data) + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> tuple[dsa.DSAPrivateKey, memoryview]: + """Make DSA private key from data.""" + (p, q, g, y), data = self.get_public(data) + x, data = _get_mpint(data) + + if (p, q, g, y) != pubfields: + raise ValueError("Corrupt data: dsa field mismatch") + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: dsa.DSAPublicKey, f_pub: _FragList + ) -> None: + """Write DSA public key""" + public_numbers = public_key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + self._validate(public_numbers) + + f_pub.put_mpint(parameter_numbers.p) + f_pub.put_mpint(parameter_numbers.q) + f_pub.put_mpint(parameter_numbers.g) + f_pub.put_mpint(public_numbers.y) + + def encode_private( + self, private_key: dsa.DSAPrivateKey, f_priv: _FragList + ) -> None: + """Write DSA private key""" + self.encode_public(private_key.public_key(), f_priv) + f_priv.put_mpint(private_key.private_numbers().x) + + def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: + parameter_numbers = public_numbers.parameter_numbers + if parameter_numbers.p.bit_length() != 1024: + raise ValueError("SSH supports only 1024 bit DSA keys") + + +class _SSHFormatECDSA: + """Format for ECDSA keys. 
+ + Public: + str curve + bytes point + Private: + str curve + bytes point + mpint secret + """ + + def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): + self.ssh_curve_name = ssh_curve_name + self.curve = curve + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview, memoryview], memoryview]: + """ECDSA public fields""" + curve, data = _get_sshstr(data) + point, data = _get_sshstr(data) + if curve != self.ssh_curve_name: + raise ValueError("Curve name mismatch") + if point[0] != 4: + raise NotImplementedError("Need uncompressed point") + return (curve, point), data + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + (_, point), data = self.get_public(data) + public_key = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: + """Make ECDSA private key from data.""" + (curve_name, point), data = self.get_public(data) + secret, data = _get_mpint(data) + + if (curve_name, point) != pubfields: + raise ValueError("Corrupt data: ecdsa field mismatch") + private_key = ec.derive_private_key(secret, self.curve) + return private_key, data + + def encode_public( + self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList + ) -> None: + """Write ECDSA public key""" + point = public_key.public_bytes( + Encoding.X962, PublicFormat.UncompressedPoint + ) + f_pub.put_sshstr(self.ssh_curve_name) + f_pub.put_sshstr(point) + + def encode_private( + self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList + ) -> None: + """Write ECDSA private key""" + public_key = private_key.public_key() + private_numbers = private_key.private_numbers() + + self.encode_public(public_key, f_priv) + f_priv.put_mpint(private_numbers.private_value) + + +class _SSHFormatEd25519: + """Format for Ed25519 keys. 
+ + Public: + bytes point + Private: + bytes point + bytes secret_and_point + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview], memoryview]: + """Ed25519 public fields""" + point, data = _get_sshstr(data) + return (point,), data + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + (point,), data = self.get_public(data) + public_key = ed25519.Ed25519PublicKey.from_public_bytes( + point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: + """Make Ed25519 private key from data.""" + (point,), data = self.get_public(data) + keypair, data = _get_sshstr(data) + + secret = keypair[:32] + point2 = keypair[32:] + if point != point2 or (point,) != pubfields: + raise ValueError("Corrupt data: ed25519 field mismatch") + private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) + return private_key, data + + def encode_public( + self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList + ) -> None: + """Write Ed25519 public key""" + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_pub.put_sshstr(raw_public_key) + + def encode_private( + self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList + ) -> None: + """Write Ed25519 private key""" + public_key = private_key.public_key() + raw_private_key = private_key.private_bytes( + Encoding.Raw, PrivateFormat.Raw, NoEncryption() + ) + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_keypair = _FragList([raw_private_key, raw_public_key]) + + self.encode_public(public_key, f_priv) + f_priv.put_sshstr(f_keypair) + + +def load_application(data) -> tuple[memoryview, memoryview]: + """ + U2F application strings + """ + application, data = _get_sshstr(data) + if not application.tobytes().startswith(b"ssh:"): + raise ValueError( + "U2F application string does not start with b'ssh:' " + f"({application})" + ) + return application, data + + +class _SSHFormatSKEd25519: + """ + The format of a sk-ssh-ed25519@openssh.com public key is: + + string "sk-ssh-ed25519@openssh.com" + string public key + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) + _, data = load_application(data) + return public_key, data + + +class _SSHFormatSKECDSA: + """ + The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: + + string "sk-ecdsa-sha2-nistp256@openssh.com" + string curve name + ec_point Q + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) + _, data = load_application(data) + return public_key, data + + +_KEY_FORMATS = { + _SSH_RSA: _SSHFormatRSA(), + _SSH_DSA: _SSHFormatDSA(), + _SSH_ED25519: _SSHFormatEd25519(), + _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), + _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), + _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), + _SK_SSH_ED25519: _SSHFormatSKEd25519(), + _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), +} + + +def _lookup_kformat(key_type: 
bytes): + """Return valid format or throw error""" + if not isinstance(key_type, bytes): + key_type = memoryview(key_type).tobytes() + if key_type in _KEY_FORMATS: + return _KEY_FORMATS[key_type] + raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") + + +SSHPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +def load_ssh_private_key( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> SSHPrivateKeyTypes: + """Load private key from OpenSSH custom encoding.""" + utils._check_byteslike("data", data) + if password is not None: + utils._check_bytes("password", password) + + m = _PEM_RC.search(data) + if not m: + raise ValueError("Not OpenSSH private key format") + p1 = m.start(1) + p2 = m.end(1) + data = binascii.a2b_base64(memoryview(data)[p1:p2]) + if not data.startswith(_SK_MAGIC): + raise ValueError("Not OpenSSH private key format") + data = memoryview(data)[len(_SK_MAGIC) :] + + # parse header + ciphername, data = _get_sshstr(data) + kdfname, data = _get_sshstr(data) + kdfoptions, data = _get_sshstr(data) + nkeys, data = _get_u32(data) + if nkeys != 1: + raise ValueError("Only one key supported") + + # load public key data + pubdata, data = _get_sshstr(data) + pub_key_type, pubdata = _get_sshstr(pubdata) + kformat = _lookup_kformat(pub_key_type) + pubfields, pubdata = kformat.get_public(pubdata) + _check_empty(pubdata) + + if (ciphername, kdfname) != (_NONE, _NONE): + ciphername_bytes = ciphername.tobytes() + if ciphername_bytes not in _SSH_CIPHERS: + raise UnsupportedAlgorithm( + f"Unsupported cipher: {ciphername_bytes!r}" + ) + if kdfname != _BCRYPT: + raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") + blklen = _SSH_CIPHERS[ciphername_bytes].block_len + tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len + # load secret data + edata, data = _get_sshstr(data) + # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for + # information about how OpenSSH handles AEAD tags + if _SSH_CIPHERS[ciphername_bytes].is_aead: + tag = bytes(data) + if len(tag) != tag_len: + raise ValueError("Corrupt data: invalid tag length for cipher") + else: + _check_empty(data) + _check_block_size(edata, blklen) + salt, kbuf = _get_sshstr(kdfoptions) + rounds, kbuf = _get_u32(kbuf) + _check_empty(kbuf) + ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) + dec = ciph.decryptor() + edata = memoryview(dec.update(edata)) + if _SSH_CIPHERS[ciphername_bytes].is_aead: + assert isinstance(dec, AEADDecryptionContext) + _check_empty(dec.finalize_with_tag(tag)) + else: + # _check_block_size requires data to be a full block so there + # should be no output from finalize + _check_empty(dec.finalize()) + else: + # load secret data + edata, data = _get_sshstr(data) + _check_empty(data) + blklen = 8 + _check_block_size(edata, blklen) + ck1, edata = _get_u32(edata) + ck2, edata = _get_u32(edata) + if ck1 != ck2: + raise ValueError("Corrupt data: broken checksum") + + # load per-key struct + key_type, edata = _get_sshstr(edata) + if key_type != pub_key_type: + raise ValueError("Corrupt data: key type mismatch") + private_key, edata = kformat.load_private(edata, pubfields) + # We don't use the comment + _, edata = _get_sshstr(edata) + + # yes, SSH does padding check *after* all other parsing is done. + # need to follow as it writes zero-byte padding too. 
+ if edata != _PADDING[: len(edata)]: + raise ValueError("Corrupt data: invalid padding") + + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + + return private_key + + +def _serialize_ssh_private_key( + private_key: SSHPrivateKeyTypes, + password: bytes, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: + """Serialize private key with OpenSSH custom encoding.""" + utils._check_bytes("password", password) + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + + key_type = _get_ssh_key_type(private_key) + kformat = _lookup_kformat(key_type) + + # setup parameters + f_kdfoptions = _FragList() + if password: + ciphername = _DEFAULT_CIPHER + blklen = _SSH_CIPHERS[ciphername].block_len + kdfname = _BCRYPT + rounds = _DEFAULT_ROUNDS + if ( + isinstance(encryption_algorithm, _KeySerializationEncryption) + and encryption_algorithm._kdf_rounds is not None + ): + rounds = encryption_algorithm._kdf_rounds + salt = os.urandom(16) + f_kdfoptions.put_sshstr(salt) + f_kdfoptions.put_u32(rounds) + ciph = _init_cipher(ciphername, password, salt, rounds) + else: + ciphername = kdfname = _NONE + blklen = 8 + ciph = None + nkeys = 1 + checkval = os.urandom(4) + comment = b"" + + # encode public and private parts together + f_public_key = _FragList() + f_public_key.put_sshstr(key_type) + kformat.encode_public(private_key.public_key(), f_public_key) + + f_secrets = _FragList([checkval, checkval]) + f_secrets.put_sshstr(key_type) + kformat.encode_private(private_key, f_secrets) + f_secrets.put_sshstr(comment) + f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) + + # top-level structure + f_main = _FragList() + f_main.put_raw(_SK_MAGIC) + f_main.put_sshstr(ciphername) + f_main.put_sshstr(kdfname) + f_main.put_sshstr(f_kdfoptions) + f_main.put_u32(nkeys) + f_main.put_sshstr(f_public_key) + f_main.put_sshstr(f_secrets) + + # copy result info bytearray + slen = f_secrets.size() + mlen = f_main.size() + buf = memoryview(bytearray(mlen + blklen)) + f_main.render(buf) + ofs = mlen - slen + + # encrypt in-place + if ciph is not None: + ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) + + return _ssh_pem_encode(buf[:mlen]) + + +SSHPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + dsa.DSAPublicKey, + ed25519.Ed25519PublicKey, +] + +SSHCertPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, +] + + +class SSHCertificateType(enum.Enum): + USER = 1 + HOST = 2 + + +class SSHCertificate: + def __init__( + self, + _nonce: memoryview, + _public_key: SSHPublicKeyTypes, + _serial: int, + _cctype: int, + _key_id: memoryview, + _valid_principals: list[bytes], + _valid_after: int, + _valid_before: int, + _critical_options: dict[bytes, bytes], + _extensions: dict[bytes, bytes], + _sig_type: memoryview, + _sig_key: memoryview, + _inner_sig_type: memoryview, + _signature: memoryview, + _tbs_cert_body: memoryview, + _cert_key_type: bytes, + _cert_body: memoryview, + ): + self._nonce = _nonce + self._public_key = _public_key + self._serial = _serial + try: + self._type = SSHCertificateType(_cctype) + except ValueError: + raise ValueError("Invalid certificate type") + self._key_id = _key_id + self._valid_principals = _valid_principals + 
self._valid_after = _valid_after + self._valid_before = _valid_before + self._critical_options = _critical_options + self._extensions = _extensions + self._sig_type = _sig_type + self._sig_key = _sig_key + self._inner_sig_type = _inner_sig_type + self._signature = _signature + self._cert_key_type = _cert_key_type + self._cert_body = _cert_body + self._tbs_cert_body = _tbs_cert_body + + @property + def nonce(self) -> bytes: + return bytes(self._nonce) + + def public_key(self) -> SSHCertPublicKeyTypes: + # make mypy happy until we remove DSA support entirely and + # the underlying union won't have a disallowed type + return typing.cast(SSHCertPublicKeyTypes, self._public_key) + + @property + def serial(self) -> int: + return self._serial + + @property + def type(self) -> SSHCertificateType: + return self._type + + @property + def key_id(self) -> bytes: + return bytes(self._key_id) + + @property + def valid_principals(self) -> list[bytes]: + return self._valid_principals + + @property + def valid_before(self) -> int: + return self._valid_before + + @property + def valid_after(self) -> int: + return self._valid_after + + @property + def critical_options(self) -> dict[bytes, bytes]: + return self._critical_options + + @property + def extensions(self) -> dict[bytes, bytes]: + return self._extensions + + def signature_key(self) -> SSHCertPublicKeyTypes: + sigformat = _lookup_kformat(self._sig_type) + signature_key, sigkey_rest = sigformat.load_public(self._sig_key) + _check_empty(sigkey_rest) + return signature_key + + def public_bytes(self) -> bytes: + return ( + bytes(self._cert_key_type) + + b" " + + binascii.b2a_base64(bytes(self._cert_body), newline=False) + ) + + def verify_cert_signature(self) -> None: + signature_key = self.signature_key() + if isinstance(signature_key, ed25519.Ed25519PublicKey): + signature_key.verify( + bytes(self._signature), bytes(self._tbs_cert_body) + ) + elif isinstance(signature_key, ec.EllipticCurvePublicKey): + # The signature is encoded as a pair of big-endian integers + r, data = _get_mpint(self._signature) + s, data = _get_mpint(data) + _check_empty(data) + computed_sig = asym_utils.encode_dss_signature(r, s) + hash_alg = _get_ec_hash_alg(signature_key.curve) + signature_key.verify( + computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) + ) + else: + assert isinstance(signature_key, rsa.RSAPublicKey) + if self._inner_sig_type == _SSH_RSA: + hash_alg = hashes.SHA1() + elif self._inner_sig_type == _SSH_RSA_SHA256: + hash_alg = hashes.SHA256() + else: + assert self._inner_sig_type == _SSH_RSA_SHA512 + hash_alg = hashes.SHA512() + signature_key.verify( + bytes(self._signature), + bytes(self._tbs_cert_body), + padding.PKCS1v15(), + hash_alg, + ) + + +def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: + if isinstance(curve, ec.SECP256R1): + return hashes.SHA256() + elif isinstance(curve, ec.SECP384R1): + return hashes.SHA384() + else: + assert isinstance(curve, ec.SECP521R1) + return hashes.SHA512() + + +def _load_ssh_public_identity( + data: bytes, + _legacy_dsa_allowed=False, +) -> SSHCertificate | SSHPublicKeyTypes: + utils._check_byteslike("data", data) + + m = _SSH_PUBKEY_RC.match(data) + if not m: + raise ValueError("Invalid line format") + key_type = orig_key_type = m.group(1) + key_body = m.group(2) + with_cert = False + if key_type.endswith(_CERT_SUFFIX): + with_cert = True + key_type = key_type[: -len(_CERT_SUFFIX)] + if key_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA keys aren't supported 
in SSH certificates" + ) + kformat = _lookup_kformat(key_type) + + try: + rest = memoryview(binascii.a2b_base64(key_body)) + except (TypeError, binascii.Error): + raise ValueError("Invalid format") + + if with_cert: + cert_body = rest + inner_key_type, rest = _get_sshstr(rest) + if inner_key_type != orig_key_type: + raise ValueError("Invalid key format") + if with_cert: + nonce, rest = _get_sshstr(rest) + public_key, rest = kformat.load_public(rest) + if with_cert: + serial, rest = _get_u64(rest) + cctype, rest = _get_u32(rest) + key_id, rest = _get_sshstr(rest) + principals, rest = _get_sshstr(rest) + valid_principals = [] + while principals: + principal, principals = _get_sshstr(principals) + valid_principals.append(bytes(principal)) + valid_after, rest = _get_u64(rest) + valid_before, rest = _get_u64(rest) + crit_options, rest = _get_sshstr(rest) + critical_options = _parse_exts_opts(crit_options) + exts, rest = _get_sshstr(rest) + extensions = _parse_exts_opts(exts) + # Get the reserved field, which is unused. + _, rest = _get_sshstr(rest) + sig_key_raw, rest = _get_sshstr(rest) + sig_type, sig_key = _get_sshstr(sig_key_raw) + if sig_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA signatures aren't supported in SSH certificates" + ) + # Get the entire cert body and subtract the signature + tbs_cert_body = cert_body[: -len(rest)] + signature_raw, rest = _get_sshstr(rest) + _check_empty(rest) + inner_sig_type, sig_rest = _get_sshstr(signature_raw) + # RSA certs can have multiple algorithm types + if ( + sig_type == _SSH_RSA + and inner_sig_type + not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] + ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): + raise ValueError("Signature key type does not match") + signature, sig_rest = _get_sshstr(sig_rest) + _check_empty(sig_rest) + return SSHCertificate( + nonce, + public_key, + serial, + cctype, + key_id, + valid_principals, + valid_after, + valid_before, + critical_options, + extensions, + sig_type, + sig_key, + inner_sig_type, + signature, + tbs_cert_body, + orig_key_type, + cert_body, + ) + else: + _check_empty(rest) + return public_key + + +def load_ssh_public_identity( + data: bytes, +) -> SSHCertificate | SSHPublicKeyTypes: + return _load_ssh_public_identity(data) + + +def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: + result: dict[bytes, bytes] = {} + last_name = None + while exts_opts: + name, exts_opts = _get_sshstr(exts_opts) + bname: bytes = bytes(name) + if bname in result: + raise ValueError("Duplicate name") + if last_name is not None and bname < last_name: + raise ValueError("Fields not lexically sorted") + value, exts_opts = _get_sshstr(exts_opts) + if len(value) > 0: + value, extra = _get_sshstr(value) + if len(extra) > 0: + raise ValueError("Unexpected extra data after value") + result[bname] = bytes(value) + last_name = bname + return result + + +def load_ssh_public_key( + data: bytes, backend: typing.Any = None +) -> SSHPublicKeyTypes: + cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) + public_key: SSHPublicKeyTypes + if isinstance(cert_or_key, SSHCertificate): + public_key = cert_or_key.public_key() + else: + public_key = cert_or_key + + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + return public_key + + +def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: + """One-line public key 
format for OpenSSH""" + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + key_type = _get_ssh_key_type(public_key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(public_key, f_pub) + + pub = binascii.b2a_base64(f_pub.tobytes()).strip() + return b"".join([key_type, b" ", pub]) + + +SSHCertPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +# This is an undocumented limit enforced in the openssh codebase for sshd and +# ssh-keygen, but it is undefined in the ssh certificates spec. +_SSHKEY_CERT_MAX_PRINCIPALS = 256 + + +class SSHCertificateBuilder: + def __init__( + self, + _public_key: SSHCertPublicKeyTypes | None = None, + _serial: int | None = None, + _type: SSHCertificateType | None = None, + _key_id: bytes | None = None, + _valid_principals: list[bytes] = [], + _valid_for_all_principals: bool = False, + _valid_before: int | None = None, + _valid_after: int | None = None, + _critical_options: list[tuple[bytes, bytes]] = [], + _extensions: list[tuple[bytes, bytes]] = [], + ): + self._public_key = _public_key + self._serial = _serial + self._type = _type + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_for_all_principals = _valid_for_all_principals + self._valid_before = _valid_before + self._valid_after = _valid_after + self._critical_options = _critical_options + self._extensions = _extensions + + def public_key( + self, public_key: SSHCertPublicKeyTypes + ) -> SSHCertificateBuilder: + if not isinstance( + public_key, + ( + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, + ), + ): + raise TypeError("Unsupported key type") + if self._public_key is not None: + raise ValueError("public_key already set") + + return SSHCertificateBuilder( + _public_key=public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def serial(self, serial: int) -> SSHCertificateBuilder: + if not isinstance(serial, int): + raise TypeError("serial must be an integer") + if not 0 <= serial < 2**64: + raise ValueError("serial must be between 0 and 2**64") + if self._serial is not None: + raise ValueError("serial already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: + if not isinstance(type, SSHCertificateType): + raise TypeError("type must be an SSHCertificateType") + if self._type is not None: + raise ValueError("type already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + 
_valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def key_id(self, key_id: bytes) -> SSHCertificateBuilder: + if not isinstance(key_id, bytes): + raise TypeError("key_id must be bytes") + if self._key_id is not None: + raise ValueError("key_id already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_principals( + self, valid_principals: list[bytes] + ) -> SSHCertificateBuilder: + if self._valid_for_all_principals: + raise ValueError( + "Principals can't be set because the cert is valid " + "for all principals" + ) + if ( + not all(isinstance(x, bytes) for x in valid_principals) + or not valid_principals + ): + raise TypeError( + "principals must be a list of bytes and can't be empty" + ) + if self._valid_principals: + raise ValueError("valid_principals already set") + + if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: + raise ValueError( + "Exceeded the maximum number of valid_principals" + ) + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_for_all_principals(self) -> SSHCertificateBuilder: + if self._valid_principals: + raise ValueError( + "valid_principals already set, can't set " + "valid_for_all_principals" + ) + if self._valid_for_all_principals: + raise ValueError("valid_for_all_principals already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=True, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_before, (int, float)): + raise TypeError("valid_before must be an int or float") + valid_before = int(valid_before) + if valid_before < 0 or valid_before >= 2**64: + raise ValueError("valid_before must be in the range [0, 2**64)") + if self._valid_before is not None: + raise ValueError("valid_before already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_after, (int, float)): + raise TypeError("valid_after must be an int or float") + valid_after = int(valid_after) + if valid_after < 0 or valid_after >= 2**64: + raise ValueError("valid_after must be in the range [0, 2**64)") + if self._valid_after is not None: + raise ValueError("valid_after already set") + + return
SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def add_critical_option( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._critical_options]: + raise ValueError("Duplicate critical option name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=[*self._critical_options, (name, value)], + _extensions=self._extensions, + ) + + def add_extension( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._extensions]: + raise ValueError("Duplicate extension name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=[*self._extensions, (name, value)], + ) + + def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: + if not isinstance( + private_key, + ( + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, + ), + ): + raise TypeError("Unsupported private key type") + + if self._public_key is None: + raise ValueError("public_key must be set") + + # Not required + serial = 0 if self._serial is None else self._serial + + if self._type is None: + raise ValueError("type must be set") + + # Not required + key_id = b"" if self._key_id is None else self._key_id + + # A zero length list is valid, but means the certificate + # is valid for any principal of the specified type. We require + # the user to explicitly set valid_for_all_principals to get + # that behavior. 
+ if not self._valid_principals and not self._valid_for_all_principals: + raise ValueError( + "valid_principals must be set if valid_for_all_principals " + "is False" + ) + + if self._valid_before is None: + raise ValueError("valid_before must be set") + + if self._valid_after is None: + raise ValueError("valid_after must be set") + + if self._valid_after > self._valid_before: + raise ValueError("valid_after must be earlier than valid_before") + + # lexically sort our byte strings + self._critical_options.sort(key=lambda x: x[0]) + self._extensions.sort(key=lambda x: x[0]) + + key_type = _get_ssh_key_type(self._public_key) + cert_prefix = key_type + _CERT_SUFFIX + + # Marshal the bytes to be signed + nonce = os.urandom(32) + kformat = _lookup_kformat(key_type) + f = _FragList() + f.put_sshstr(cert_prefix) + f.put_sshstr(nonce) + kformat.encode_public(self._public_key, f) + f.put_u64(serial) + f.put_u32(self._type.value) + f.put_sshstr(key_id) + fprincipals = _FragList() + for p in self._valid_principals: + fprincipals.put_sshstr(p) + f.put_sshstr(fprincipals.tobytes()) + f.put_u64(self._valid_after) + f.put_u64(self._valid_before) + fcrit = _FragList() + for name, value in self._critical_options: + fcrit.put_sshstr(name) + if len(value) > 0: + foptval = _FragList() + foptval.put_sshstr(value) + fcrit.put_sshstr(foptval.tobytes()) + else: + fcrit.put_sshstr(value) + f.put_sshstr(fcrit.tobytes()) + fext = _FragList() + for name, value in self._extensions: + fext.put_sshstr(name) + if len(value) > 0: + fextval = _FragList() + fextval.put_sshstr(value) + fext.put_sshstr(fextval.tobytes()) + else: + fext.put_sshstr(value) + f.put_sshstr(fext.tobytes()) + f.put_sshstr(b"") # RESERVED FIELD + # encode CA public key + ca_type = _get_ssh_key_type(private_key) + caformat = _lookup_kformat(ca_type) + caf = _FragList() + caf.put_sshstr(ca_type) + caformat.encode_public(private_key.public_key(), caf) + f.put_sshstr(caf.tobytes()) + # Sigs according to the rules defined for the CA's public key + # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, + # and RFC8032 for Ed25519). + if isinstance(private_key, ed25519.Ed25519PrivateKey): + signature = private_key.sign(f.tobytes()) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + elif isinstance(private_key, ec.EllipticCurvePrivateKey): + hash_alg = _get_ec_hash_alg(private_key.curve) + signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) + r, s = asym_utils.decode_dss_signature(signature) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsigblob = _FragList() + fsigblob.put_mpint(r) + fsigblob.put_mpint(s) + fsig.put_sshstr(fsigblob.tobytes()) + f.put_sshstr(fsig.tobytes()) + + else: + assert isinstance(private_key, rsa.RSAPrivateKey) + # Just like Golang, we're going to use SHA512 for RSA + # https://cs.opensource.google/go/x/crypto/+/refs/tags/ + # v0.4.0:ssh/certs.go;l=445 + # RFC 8332 defines SHA256 and 512 as options + fsig = _FragList() + fsig.put_sshstr(_SSH_RSA_SHA512) + signature = private_key.sign( + f.tobytes(), padding.PKCS1v15(), hashes.SHA512() + ) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + + cert_data = binascii.b2a_base64(f.tobytes()).strip() + # load_ssh_public_identity returns a union, but this is + # guaranteed to be an SSHCertificate, so we cast to make + # mypy happy. 
+ return typing.cast( + SSHCertificate, + load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), + ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..c1af423 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + + +class InvalidToken(Exception): + pass diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ead36eb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc new file mode 100644 index 0000000..d449801 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc new file mode 100644 index 0000000..b1e5ec5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..af5ab6e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,92 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
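Stepping back to the ssh.py hunk above: a minimal sketch of driving SSHCertificateBuilder end to end through the public serialization API. The Ed25519 keys, key ID, principal, and one-hour validity window are illustrative values, not part of the vendored source.

import time

from cryptography.hazmat.primitives.asymmetric import ed25519
from cryptography.hazmat.primitives.serialization import (
    SSHCertificate,
    SSHCertificateBuilder,
    SSHCertificateType,
    load_ssh_public_identity,
)

ca_key = ed25519.Ed25519PrivateKey.generate()    # the signing (CA) key
user_key = ed25519.Ed25519PrivateKey.generate()  # the key being certified

now = int(time.time())
cert = (
    SSHCertificateBuilder()
    .public_key(user_key.public_key())
    .type(SSHCertificateType.USER)
    .key_id(b"example-key-id")      # illustrative value
    .valid_principals([b"alice"])   # illustrative principal
    .valid_after(now)
    .valid_before(now + 3600)       # one-hour validity window
    .sign(ca_key)
)

# Round-trip the one-line OpenSSH encoding and check the CA signature;
# verify_cert_signature raises InvalidSignature on a mismatch.
reloaded = load_ssh_public_identity(cert.public_bytes())
assert isinstance(reloaded, SSHCertificate)
reloaded.verify_cert_signature()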
+ +from __future__ import annotations + +import base64 +import typing +from urllib.parse import quote, urlencode + +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken + +HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] + + +def _generate_uri( + hotp: HOTP, + type_name: str, + account_name: str, + issuer: str | None, + extra_parameters: list[tuple[str, int]], +) -> str: + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + label = ( + f"{quote(issuer)}:{quote(account_name)}" + if issuer + else quote(account_name) + ) + return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" + + +class HOTP: + def __init__( + self, + key: bytes, + length: int, + algorithm: HOTPHashTypes, + backend: typing.Any = None, + enforce_key_length: bool = True, + ) -> None: + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, int): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 and 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + + def generate(self, counter: int) -> bytes: + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10**self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp: bytes, counter: int) -> None: + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter: int) -> int: + ctx = hmac.HMAC(self._key, self._algorithm) + ctx.update(counter.to_bytes(length=8, byteorder="big")) + hmac_value = ctx.finalize() + + offset = hmac_value[len(hmac_value) - 1] & 0b1111 + p = hmac_value[offset : offset + 4] + return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF + + def get_provisioning_uri( + self, account_name: str, counter: int, issuer: str | None + ) -> str: + return _generate_uri( + self, "hotp", account_name, issuer, [("counter", int(counter))] + ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..68a5077 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,50 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
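A minimal sketch of exercising the HOTP class above; the random key and the counter values are illustrative only.

import os

from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

key = os.urandom(20)         # at least 128 bits, as enforced in __init__
hotp = HOTP(key, 6, SHA1())  # 6-digit codes with the RFC 4226 default hash

token = hotp.generate(0)     # e.g. b"123456"
hotp.verify(token, 0)        # returns silently on success

try:
    hotp.verify(token, 1)    # wrong counter value
except InvalidToken:
    pass                     # the constant-time comparison rejected it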
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import ( + HOTP, + HOTPHashTypes, + _generate_uri, +) + + +class TOTP: + def __init__( + self, + key: bytes, + length: int, + algorithm: HOTPHashTypes, + time_step: int, + backend: typing.Any = None, + enforce_key_length: bool = True, + ): + self._time_step = time_step + self._hotp = HOTP( + key, length, algorithm, enforce_key_length=enforce_key_length + ) + + def generate(self, time: int | float) -> bytes: + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp: bytes, time: int) -> None: + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri( + self, account_name: str, issuer: str | None + ) -> str: + return _generate_uri( + self._hotp, + "totp", + account_name, + issuer, + [("period", int(self._time_step))], + ) diff --git a/myenv/lib/python3.12/site-packages/cryptography/py.typed b/myenv/lib/python3.12/site-packages/cryptography/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/cryptography/utils.py b/myenv/lib/python3.12/site-packages/cryptography/utils.py new file mode 100644 index 0000000..706d0ae --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/utils.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import enum +import sys +import types +import typing +import warnings + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisible by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. 
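A matching sketch for the TOTP wrapper above, which simply derives the HOTP counter from a timestamp; the key, digit count, and 30-second time step are illustrative.

import os
import time

from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

key = os.urandom(20)
totp = TOTP(key, 8, SHA1(), 30)  # 8 digits, 30-second time step

now = time.time()
token = totp.generate(now)
totp.verify(token, int(now))     # same 30-second window -> same counter

# otpauth:// URI suitable for authenticator apps; names are illustrative.
uri = totp.get_provisioning_uri("alice@example.com", issuer="Example Inc")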
+DeprecatedIn36 = CryptographyDeprecationWarning +DeprecatedIn37 = CryptographyDeprecationWarning +DeprecatedIn40 = CryptographyDeprecationWarning +DeprecatedIn41 = CryptographyDeprecationWarning +DeprecatedIn42 = CryptographyDeprecationWarning +DeprecatedIn43 = CryptographyDeprecationWarning + + +def _check_bytes(name: str, value: bytes) -> None: + if not isinstance(value, bytes): + raise TypeError(f"{name} must be bytes") + + +def _check_byteslike(name: str, value: bytes) -> None: + try: + memoryview(value) + except TypeError: + raise TypeError(f"{name} must be bytes-like") + + +def int_to_bytes(integer: int, length: int | None = None) -> bytes: + if length == 0: + raise ValueError("length argument can't be 0") + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, "big" + ) + + +class InterfaceNotImplemented(Exception): + pass + + +class _DeprecatedValue: + def __init__(self, value: object, message: str, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(types.ModuleType): + def __init__(self, module: types.ModuleType): + super().__init__(module.__name__) + self.__dict__["_module"] = module + + def __getattr__(self, attr: str) -> object: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr: str, value: object) -> None: + setattr(self._module, attr, value) + + def __delattr__(self, attr: str) -> None: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self) -> typing.Sequence[str]: + return ["_module", *dir(self._module)] + + +def deprecated( + value: object, + module_name: str, + message: str, + warning_class: type[Warning], + name: str | None = None, +) -> _DeprecatedValue: + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + dv = _DeprecatedValue(value, message, warning_class) + # Maintain backwards compatibility with `name is None` for pyOpenSSL. + if name is not None: + setattr(module, name, dv) + return dv + + +def cached_property(func: typing.Callable) -> property: + cached_name = f"_cached_{func}" + sentinel = object() + + def inner(instance: object): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + + return property(inner) + + +# Python 3.10 changed representation of enums. We use well-defined object +# representation and string representation from Python 3.9. +class Enum(enum.Enum): + def __repr__(self) -> str: + return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" + + def __str__(self) -> str: + return f"{self.__class__.__name__}.{self._name_}" diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__init__.py b/myenv/lib/python3.12/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..26c6444 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,259 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
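The deprecated()/_ModuleWithDeprecations machinery above can be exercised roughly as follows; mymodule and OLD_CONSTANT are invented names used purely for illustration.

import sys
import types
import warnings

from cryptography import utils

# A throwaway placeholder module; real callers pass their own module name.
mod = types.ModuleType("mymodule")
sys.modules["mymodule"] = mod

utils.deprecated(
    42,
    "mymodule",
    "OLD_CONSTANT is deprecated.",
    utils.CryptographyDeprecationWarning,
    name="OLD_CONSTANT",
)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import mymodule

    _ = mymodule.OLD_CONSTANT  # attribute access emits the warning

assert caught[0].category is utils.CryptographyDeprecationWarning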
+ +from __future__ import annotations + +from cryptography.x509 import certificate_transparency, verification +from cryptography.x509.base import ( + Attribute, + AttributeNotFound, + Attributes, + Certificate, + CertificateBuilder, + CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, + CertificateSigningRequestBuilder, + InvalidVersion, + RevokedCertificate, + RevokedCertificateBuilder, + Version, + load_der_x509_certificate, + load_der_x509_crl, + load_der_x509_csr, + load_pem_x509_certificate, + load_pem_x509_certificates, + load_pem_x509_crl, + load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, + AuthorityInformationAccess, + AuthorityKeyIdentifier, + BasicConstraints, + CertificateIssuer, + CertificatePolicies, + CRLDistributionPoints, + CRLNumber, + CRLReason, + DeltaCRLIndicator, + DistributionPoint, + DuplicateExtension, + ExtendedKeyUsage, + Extension, + ExtensionNotFound, + Extensions, + ExtensionType, + FreshestCRL, + GeneralNames, + InhibitAnyPolicy, + InvalidityDate, + IssuerAlternativeName, + IssuingDistributionPoint, + KeyUsage, + MSCertificateTemplate, + NameConstraints, + NoticeReference, + OCSPAcceptableResponses, + OCSPNoCheck, + OCSPNonce, + PolicyConstraints, + PolicyInformation, + PrecertificateSignedCertificateTimestamps, + PrecertPoison, + ReasonFlags, + SignedCertificateTimestamps, + SubjectAlternativeName, + SubjectInformationAccess, + SubjectKeyIdentifier, + TLSFeature, + TLSFeatureType, + UnrecognizedExtension, + UserNotice, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + UnsupportedGeneralNameType, +) +from cryptography.x509.name import ( + Name, + NameAttribute, + RelativeDistinguishedName, +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, +) + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 
+OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "OID_CA_ISSUERS", + "OID_OCSP", + "AccessDescription", + "Attribute", + "AttributeNotFound", + "Attributes", + "AuthorityInformationAccess", + "AuthorityKeyIdentifier", + "BasicConstraints", + "CRLDistributionPoints", + "CRLNumber", + "CRLReason", + "Certificate", + "CertificateBuilder", + "CertificateIssuer", + "CertificatePolicies", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "CertificateSigningRequestBuilder", + "DNSName", + "DeltaCRLIndicator", + "DirectoryName", + "DistributionPoint", + "DuplicateExtension", + "ExtendedKeyUsage", + "Extension", + "ExtensionNotFound", + "ExtensionType", + "Extensions", + "FreshestCRL", + "GeneralName", + "GeneralNames", + "IPAddress", + "InhibitAnyPolicy", + "InvalidVersion", + "InvalidityDate", + "IssuerAlternativeName", + "IssuingDistributionPoint", + "KeyUsage", + "MSCertificateTemplate", + "Name", + "NameAttribute", + "NameConstraints", + "NameOID", + "NoticeReference", + "OCSPAcceptableResponses", + "OCSPNoCheck", + "OCSPNonce", + "ObjectIdentifier", + "OtherName", + "PolicyConstraints", + "PolicyInformation", + "PrecertPoison", + "PrecertificateSignedCertificateTimestamps", + "PublicKeyAlgorithmOID", + "RFC822Name", + "ReasonFlags", + "RegisteredID", + "RelativeDistinguishedName", + "RevokedCertificate", + "RevokedCertificateBuilder", + "SignatureAlgorithmOID", + "SignedCertificateTimestamps", + "SubjectAlternativeName", + "SubjectInformationAccess", + 
"SubjectKeyIdentifier", + "TLSFeature", + "TLSFeatureType", + "UniformResourceIdentifier", + "UnrecognizedExtension", + "UnsupportedGeneralNameType", + "UserNotice", + "Version", + "certificate_transparency", + "load_der_x509_certificate", + "load_der_x509_crl", + "load_der_x509_csr", + "load_pem_x509_certificate", + "load_pem_x509_certificates", + "load_pem_x509_crl", + "load_pem_x509_csr", + "random_serial_number", + "verification", + "verification", +] diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c327b34 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..bd1387f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc new file mode 100644 index 0000000..51b3d83 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc new file mode 100644 index 0000000..35e843b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc new file mode 100644 index 0000000..ea41242 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc new file mode 100644 index 0000000..fa5e9e1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc new file mode 100644 index 0000000..24d060e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc new file mode 100644 index 0000000..88414d0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc new file mode 100644 index 0000000..c116324 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/base.py b/myenv/lib/python3.12/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..6ed41e6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/base.py @@ -0,0 +1,1226 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import os +import typing +import warnings + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + padding, + rsa, + x448, + x25519, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.extensions import ( + Extension, + Extensions, + ExtensionType, + _make_sequence_methods, +) +from cryptography.x509.name import Name, _ASN1Type +from cryptography.x509.oid import ObjectIdentifier + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + +# This must be kept in sync with sign.rs's list of allowable types in +# identify_hash_type +_AllowedHashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, +] + + +class AttributeNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +def _reject_duplicate_extension( + extension: Extension[ExtensionType], + extensions: list[Extension[ExtensionType]], +) -> None: + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError("This extension has already been set.") + + +def _reject_duplicate_attribute( + oid: ObjectIdentifier, + attributes: list[tuple[ObjectIdentifier, bytes, int | None]], +) -> None: + # This is quadratic in the number of attributes + for attr_oid, _, _ in attributes: + if attr_oid == oid: + raise ValueError("This attribute has already been set.") + + +def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. 
+ """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Attribute: + def __init__( + self, + oid: ObjectIdentifier, + value: bytes, + _type: int = _ASN1Type.UTF8String.value, + ) -> None: + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Attribute): + return NotImplemented + + return ( + self.oid == other.oid + and self.value == other.value + and self._type == other._type + ) + + def __hash__(self) -> int: + return hash((self.oid, self.value, self._type)) + + +class Attributes: + def __init__( + self, + attributes: typing.Iterable[Attribute], + ) -> None: + self._attributes = list(attributes) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") + + def __repr__(self) -> str: + return f"" + + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: + for attr in self: + if attr.oid == oid: + return attr + + raise AttributeNotFound(f"No {oid} attribute was found", oid) + + +class Version(utils.Enum): + v1 = 0 + v3 = 2 + + +class InvalidVersion(Exception): + def __init__(self, msg: str, parsed_version: int) -> None: + super().__init__(msg) + self.parsed_version = parsed_version + + +class Certificate(metaclass=abc.ABCMeta): + @abc.abstractmethod + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: + """ + Returns bytes using digest passed. + """ + + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + Returns certificate serial number + """ + + @property + @abc.abstractmethod + def version(self) -> Version: + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self) -> CertificatePublicKeyTypes: + """ + Returns the public key + """ + + @property + @abc.abstractmethod + def public_key_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the public key. + """ + + @property + @abc.abstractmethod + def not_valid_before(self) -> datetime.datetime: + """ + Not before time (represented as UTC datetime) + """ + + @property + @abc.abstractmethod + def not_valid_before_utc(self) -> datetime.datetime: + """ + Not before time (represented as a non-naive UTC datetime) + """ + + @property + @abc.abstractmethod + def not_valid_after(self) -> datetime.datetime: + """ + Not after time (represented as UTC datetime) + """ + + @property + @abc.abstractmethod + def not_valid_after_utc(self) -> datetime.datetime: + """ + Not after time (represented as a non-naive UTC datetime) + """ + + @property + @abc.abstractmethod + def issuer(self) -> Name: + """ + Returns the issuer name object. + """ + + @property + @abc.abstractmethod + def subject(self) -> Name: + """ + Returns the subject name object. + """ + + @property + @abc.abstractmethod + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @property + @abc.abstractmethod + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. 
+ """ + + @property + @abc.abstractmethod + def signature_algorithm_parameters( + self, + ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: + """ + Returns the signature algorithm parameters. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object. + """ + + @property + @abc.abstractmethod + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @property + @abc.abstractmethod + def tbs_certificate_bytes(self) -> bytes: + """ + Returns the tbsCertificate payload bytes as defined in RFC 5280. + """ + + @property + @abc.abstractmethod + def tbs_precertificate_bytes(self) -> bytes: + """ + Returns the tbsCertificate payload bytes with the SCT list extension + stripped. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the certificate to PEM or DER format. + """ + + @abc.abstractmethod + def verify_directly_issued_by(self, issuer: Certificate) -> None: + """ + This method verifies that certificate issuer name matches the + issuer subject name and that the certificate is signed by the + issuer's private key. No other validation is performed. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +Certificate.register(rust_x509.Certificate) + + +class RevokedCertificate(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + Returns the serial number of the revoked certificate. + """ + + @property + @abc.abstractmethod + def revocation_date(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked. + """ + + @property + @abc.abstractmethod + def revocation_date_utc(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked as a non-naive + UTC datetime. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +RevokedCertificate.register(rust_x509.RevokedCertificate) + + +class _RawRevokedCertificate(RevokedCertificate): + def __init__( + self, + serial_number: int, + revocation_date: datetime.datetime, + extensions: Extensions, + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + @property + def serial_number(self) -> int: + return self._serial_number + + @property + def revocation_date(self) -> datetime.datetime: + warnings.warn( + "Properties that return a naïve datetime object have been " + "deprecated. Please switch to revocation_date_utc.", + utils.DeprecatedIn42, + stacklevel=2, + ) + return self._revocation_date + + @property + def revocation_date_utc(self) -> datetime.datetime: + return self._revocation_date.replace(tzinfo=datetime.timezone.utc) + + @property + def extensions(self) -> Extensions: + return self._extensions + + +class CertificateRevocationList(metaclass=abc.ABCMeta): + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the CRL to PEM or DER format. + """ + + @abc.abstractmethod + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: + """ + Returns bytes using digest passed. 
+ """ + + @abc.abstractmethod + def get_revoked_certificate_by_serial_number( + self, serial_number: int + ) -> RevokedCertificate | None: + """ + Returns an instance of RevokedCertificate or None if the serial_number + is not in the CRL. + """ + + @property + @abc.abstractmethod + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @property + @abc.abstractmethod + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @property + @abc.abstractmethod + def signature_algorithm_parameters( + self, + ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: + """ + Returns the signature algorithm parameters. + """ + + @property + @abc.abstractmethod + def issuer(self) -> Name: + """ + Returns the X509Name with the issuer of this CRL. + """ + + @property + @abc.abstractmethod + def next_update(self) -> datetime.datetime | None: + """ + Returns the date of next update for this CRL. + """ + + @property + @abc.abstractmethod + def next_update_utc(self) -> datetime.datetime | None: + """ + Returns the date of next update for this CRL as a non-naive UTC + datetime. + """ + + @property + @abc.abstractmethod + def last_update(self) -> datetime.datetime: + """ + Returns the date of last update for this CRL. + """ + + @property + @abc.abstractmethod + def last_update_utc(self) -> datetime.datetime: + """ + Returns the date of last update for this CRL as a non-naive UTC + datetime. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @property + @abc.abstractmethod + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @property + @abc.abstractmethod + def tbs_certlist_bytes(self) -> bytes: + """ + Returns the tbsCertList payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __len__(self) -> int: + """ + Number of revoked certificates in the CRL. + """ + + @typing.overload + def __getitem__(self, idx: int) -> RevokedCertificate: ... + + @typing.overload + def __getitem__(self, idx: slice) -> list[RevokedCertificate]: ... + + @abc.abstractmethod + def __getitem__( + self, idx: int | slice + ) -> RevokedCertificate | list[RevokedCertificate]: + """ + Returns a revoked certificate (or slice of revoked certificates). + """ + + @abc.abstractmethod + def __iter__(self) -> typing.Iterator[RevokedCertificate]: + """ + Iterator over the revoked certificates + """ + + @abc.abstractmethod + def is_signature_valid( + self, public_key: CertificateIssuerPublicKeyTypes + ) -> bool: + """ + Verifies signature of revocation list against given public key. + """ + + +CertificateRevocationList.register(rust_x509.CertificateRevocationList) + + +class CertificateSigningRequest(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self) -> CertificatePublicKeyTypes: + """ + Returns the public key + """ + + @property + @abc.abstractmethod + def subject(self) -> Name: + """ + Returns the subject name object. 
+ """ + + @property + @abc.abstractmethod + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @property + @abc.abstractmethod + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @property + @abc.abstractmethod + def signature_algorithm_parameters( + self, + ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: + """ + Returns the signature algorithm parameters. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns the extensions in the signing request. + """ + + @property + @abc.abstractmethod + def attributes(self) -> Attributes: + """ + Returns an Attributes object. + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Encodes the request to PEM or DER format. + """ + + @property + @abc.abstractmethod + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @property + @abc.abstractmethod + def tbs_certrequest_bytes(self) -> bytes: + """ + Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC + 2986. + """ + + @property + @abc.abstractmethod + def is_signature_valid(self) -> bool: + """ + Verifies signature of signing request. + """ + + @abc.abstractmethod + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes: + """ + Get the attribute value for a given OID. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +CertificateSigningRequest.register(rust_x509.CertificateSigningRequest) + + +load_pem_x509_certificate = rust_x509.load_pem_x509_certificate +load_der_x509_certificate = rust_x509.load_der_x509_certificate + +load_pem_x509_certificates = rust_x509.load_pem_x509_certificates + +load_pem_x509_csr = rust_x509.load_pem_x509_csr +load_der_x509_csr = rust_x509.load_der_x509_csr + +load_pem_x509_crl = rust_x509.load_pem_x509_crl +load_der_x509_crl = rust_x509.load_der_x509_crl + + +class CertificateSigningRequestBuilder: + def __init__( + self, + subject_name: Name | None = None, + extensions: list[Extension[ExtensionType]] = [], + attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], + ): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + self._attributes = attributes + + def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateSigningRequestBuilder( + name, self._extensions, self._attributes + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 extension to the certificate request. 
+ """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, + [*self._extensions, extension], + self._attributes, + ) + + def add_attribute( + self, + oid: ObjectIdentifier, + value: bytes, + *, + _tag: _ASN1Type | None = None, + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 attribute with an OID and associated value. + """ + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + + if not isinstance(value, bytes): + raise TypeError("value must be bytes") + + if _tag is not None and not isinstance(_tag, _ASN1Type): + raise TypeError("tag must be _ASN1Type") + + _reject_duplicate_attribute(oid, self._attributes) + + if _tag is not None: + tag = _tag.value + else: + tag = None + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions, + [*self._attributes, (oid, value, tag)], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> CertificateSigningRequest: + """ + Signs the request using the requestor's private key. + """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return rust_x509.create_x509_csr( + self, private_key, algorithm, rsa_padding + ) + + +class CertificateBuilder: + _extensions: list[Extension[ExtensionType]] + + def __init__( + self, + issuer_name: Name | None = None, + subject_name: Name | None = None, + public_key: CertificatePublicKeyTypes | None = None, + serial_number: int | None = None, + not_valid_before: datetime.datetime | None = None, + not_valid_after: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ) -> None: + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name: Name) -> CertificateBuilder: + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateBuilder( + name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def subject_name(self, name: Name) -> CertificateBuilder: + """ + Sets the requestor's distinguished name. 
+ """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateBuilder( + self._issuer_name, + name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def public_key( + self, + key: CertificatePublicKeyTypes, + ) -> CertificateBuilder: + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance( + key, + ( + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " EllipticCurvePublicKey, Ed25519PublicKey," + " Ed448PublicKey, X25519PublicKey, or " + "X448PublicKey." + ) + if self._public_key is not None: + raise ValueError("The public key may only be set once.") + return CertificateBuilder( + self._issuer_name, + self._subject_name, + key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def serial_number(self, number: int) -> CertificateBuilder: + """ + Sets the certificate serial number. + """ + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive.") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_before is not None: + raise ValueError("The not valid before may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid before date must be on or after" + " 1950 January 1)." + ) + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + "The not valid before date must be before the not valid after " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + time, + self._not_valid_after, + self._extensions, + ) + + def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_after is not None: + raise ValueError("The not valid after may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid after date must be on or after" + " 1950 January 1." + ) + if ( + self._not_valid_before is not None + and time < self._not_valid_before + ): + raise ValueError( + "The not valid after date must be after the not valid before " + "date." 
+ ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + time, + self._extensions, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateBuilder: + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> Certificate: + """ + Signs the certificate using the CA's private key. + """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return rust_x509.create_x509_certificate( + self, private_key, algorithm, rsa_padding + ) + + +class CertificateRevocationListBuilder: + _extensions: list[Extension[ExtensionType]] + _revoked_certificates: list[RevokedCertificate] + + def __init__( + self, + issuer_name: Name | None = None, + last_update: datetime.datetime | None = None, + next_update: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + revoked_certificates: list[RevokedCertificate] = [], + ): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name( + self, issuer_name: Name + ) -> CertificateRevocationListBuilder: + if not isinstance(issuer_name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateRevocationListBuilder( + issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def last_update( + self, last_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(last_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._last_update is not None: + raise ValueError("Last update may only be set once.") + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." 
+            )
+        if self._next_update is not None and last_update > self._next_update:
+            raise ValueError(
+                "The last update date must be before the next update date."
+            )
+        return CertificateRevocationListBuilder(
+            self._issuer_name,
+            last_update,
+            self._next_update,
+            self._extensions,
+            self._revoked_certificates,
+        )
+
+    def next_update(
+        self, next_update: datetime.datetime
+    ) -> CertificateRevocationListBuilder:
+        if not isinstance(next_update, datetime.datetime):
+            raise TypeError("Expecting datetime object.")
+        if self._next_update is not None:
+            raise ValueError("Next update may only be set once.")
+        next_update = _convert_to_naive_utc_time(next_update)
+        if next_update < _EARLIEST_UTC_TIME:
+            raise ValueError(
+                "The next update date must be on or after 1950 January 1."
+            )
+        if self._last_update is not None and next_update < self._last_update:
+            raise ValueError(
+                "The next update date must be after the last update date."
+            )
+        return CertificateRevocationListBuilder(
+            self._issuer_name,
+            self._last_update,
+            next_update,
+            self._extensions,
+            self._revoked_certificates,
+        )
+
+    def add_extension(
+        self, extval: ExtensionType, critical: bool
+    ) -> CertificateRevocationListBuilder:
+        """
+        Adds an X.509 extension to the certificate revocation list.
+        """
+        if not isinstance(extval, ExtensionType):
+            raise TypeError("extension must be an ExtensionType")
+
+        extension = Extension(extval.oid, critical, extval)
+        _reject_duplicate_extension(extension, self._extensions)
+        return CertificateRevocationListBuilder(
+            self._issuer_name,
+            self._last_update,
+            self._next_update,
+            [*self._extensions, extension],
+            self._revoked_certificates,
+        )
+
+    def add_revoked_certificate(
+        self, revoked_certificate: RevokedCertificate
+    ) -> CertificateRevocationListBuilder:
+        """
+        Adds a revoked certificate to the CRL.
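+
+        A sketch of a complete CRL build (illustrative only; assumes
+        ``ca_key`` is the CA's RSA private key and ``ca_name`` its
+        x509.Name)::
+
+            now = datetime.datetime.now(datetime.timezone.utc)
+            revoked = (
+                RevokedCertificateBuilder()
+                .serial_number(12345)
+                .revocation_date(now)
+                .build()
+            )
+            crl = (
+                CertificateRevocationListBuilder()
+                .issuer_name(ca_name)
+                .last_update(now)
+                .next_update(now + datetime.timedelta(days=7))
+                .add_revoked_certificate(revoked)
+                .sign(ca_key, hashes.SHA256())
+            )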
+ """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions, + [*self._revoked_certificates, revoked_certificate], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> CertificateRevocationList: + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return rust_x509.create_x509_crl( + self, private_key, algorithm, rsa_padding + ) + + +class RevokedCertificateBuilder: + def __init__( + self, + serial_number: int | None = None, + revocation_date: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number: int) -> RevokedCertificateBuilder: + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." + ) + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date( + self, time: datetime.datetime + ) -> RevokedCertificateBuilder: + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._revocation_date is not None: + raise ValueError("The revocation date may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation date must be on or after 1950 January 1." 
+ ) + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> RevokedCertificateBuilder: + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, + self._revocation_date, + [*self._extensions, extension], + ) + + def build(self, backend: typing.Any = None) -> RevokedCertificate: + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + return _RawRevokedCertificate( + self._serial_number, + self._revocation_date, + Extensions(self._extensions), + ) + + +def random_serial_number() -> int: + return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py b/myenv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py new file mode 100644 index 0000000..73647ee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1,97 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives.hashes import HashAlgorithm + + +class LogEntryType(utils.Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(utils.Enum): + v1 = 0 + + +class SignatureAlgorithm(utils.Enum): + """ + Signature algorithms that are valid for SCTs. + + These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2). + + See: + """ + + ANONYMOUS = 0 + RSA = 1 + DSA = 2 + ECDSA = 3 + + +class SignedCertificateTimestamp(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def version(self) -> Version: + """ + Returns the SCT version. + """ + + @property + @abc.abstractmethod + def log_id(self) -> bytes: + """ + Returns an identifier indicating which log this SCT is for. + """ + + @property + @abc.abstractmethod + def timestamp(self) -> datetime.datetime: + """ + Returns the timestamp for this SCT. + """ + + @property + @abc.abstractmethod + def entry_type(self) -> LogEntryType: + """ + Returns whether this is an SCT for a certificate or pre-certificate. + """ + + @property + @abc.abstractmethod + def signature_hash_algorithm(self) -> HashAlgorithm: + """ + Returns the hash algorithm used for the SCT's signature. + """ + + @property + @abc.abstractmethod + def signature_algorithm(self) -> SignatureAlgorithm: + """ + Returns the signing algorithm used for the SCT's signature. + """ + + @property + @abc.abstractmethod + def signature(self) -> bytes: + """ + Returns the signature for this SCT. + """ + + @property + @abc.abstractmethod + def extension_bytes(self) -> bytes: + """ + Returns the raw bytes of any extensions for this SCT. 
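+
+        A reading sketch (illustrative only; ``cert`` is assumed to be an
+        x509.Certificate carrying embedded SCTs)::
+
+            scts = cert.extensions.get_extension_for_class(
+                x509.PrecertificateSignedCertificateTimestamps
+            ).value
+            for sct in scts:
+                print(sct.log_id.hex(), sct.timestamp, sct.entry_type)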
+ """ + + +SignedCertificateTimestamp.register(rust_x509.Sct) diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/extensions.py b/myenv/lib/python3.12/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..5e7486a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,2196 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import hashlib +import ipaddress +import typing + +from cryptography import utils +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + _IPAddressTypes, +) +from cryptography.x509.name import Name, RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, + ExtensionOID, + ObjectIdentifier, + OCSPExtensionOID, +) + +ExtensionTypeVar = typing.TypeVar( + "ExtensionTypeVar", bound="ExtensionType", covariant=True +) + + +def _key_identifier_from_public_key( + public_key: CertificatePublicKeyTypes, +) -> bytes: + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo, + ) + data = asn1.parse_spki_for_data(serialized) + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name: str): + def len_method(self) -> int: + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionType(metaclass=abc.ABCMeta): + oid: typing.ClassVar[ObjectIdentifier] + + def public_bytes(self) -> bytes: + """ + Serializes the extension type to DER. 
+ """ + raise NotImplementedError( + f"public_bytes is not implemented for extension type {self!r}" + ) + + +class Extensions: + def __init__( + self, extensions: typing.Iterable[Extension[ExtensionType]] + ) -> None: + self._extensions = list(extensions) + + def get_extension_for_oid( + self, oid: ObjectIdentifier + ) -> Extension[ExtensionType]: + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound(f"No {oid} extension was found", oid) + + def get_extension_for_class( + self, extclass: type[ExtensionTypeVar] + ) -> Extension[ExtensionTypeVar]: + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." + ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + f"No {extclass} extension was found", extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self) -> str: + return f"" + + +class CRLNumber(ExtensionType): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + @property + def crl_number(self) -> int: + return self._crl_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityKeyIdentifier(ExtensionType): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__( + self, + key_identifier: bytes | None, + authority_cert_issuer: typing.Iterable[GeneralName] | None, + authority_cert_serial_number: int | None, + ) -> None: + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, int + ): + raise TypeError("authority_cert_serial_number must be an integer") + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + # This takes a subset of CertificatePublicKeyTypes because an issuer + # cannot have an X25519/X448 key. This introduces some unfortunate + # asymmetry that requires typing users to explicitly + # narrow their type, but we should make this accurate and not just + # convenient. 
+ @classmethod + def from_issuer_public_key( + cls, public_key: CertificateIssuerPublicKeyTypes + ) -> AuthorityKeyIdentifier: + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + @classmethod + def from_issuer_subject_key_identifier( + cls, ski: SubjectKeyIdentifier + ) -> AuthorityKeyIdentifier: + return cls( + key_identifier=ski.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier + and self.authority_cert_issuer == other.authority_cert_issuer + and self.authority_cert_serial_number + == other.authority_cert_serial_number + ) + + def __hash__(self) -> int: + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash( + (self.key_identifier, aci, self.authority_cert_serial_number) + ) + + @property + def key_identifier(self) -> bytes | None: + return self._key_identifier + + @property + def authority_cert_issuer( + self, + ) -> list[GeneralName] | None: + return self._authority_cert_issuer + + @property + def authority_cert_serial_number(self) -> int | None: + return self._authority_cert_serial_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectKeyIdentifier(ExtensionType): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest: bytes) -> None: + self._digest = digest + + @classmethod + def from_public_key( + cls, public_key: CertificatePublicKeyTypes + ) -> SubjectKeyIdentifier: + return cls(_key_identifier_from_public_key(public_key)) + + @property + def digest(self) -> bytes: + return self._digest + + @property + def key_identifier(self) -> bytes: + return self._digest + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __hash__(self) -> int: + return hash(self.digest) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityInformationAccess(ExtensionType): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__( + self, descriptions: typing.Iterable[AccessDescription] + ) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectInformationAccess(ExtensionType): + oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS + + def __init__( + self, descriptions: typing.Iterable[AccessDescription] + ) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in 
descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AccessDescription: + def __init__( + self, access_method: ObjectIdentifier, access_location: GeneralName + ) -> None: + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method + and self.access_location == other.access_location + ) + + def __hash__(self) -> int: + return hash((self.access_method, self.access_location)) + + @property + def access_method(self) -> ObjectIdentifier: + return self._access_method + + @property + def access_location(self) -> GeneralName: + return self._access_location + + +class BasicConstraints(ExtensionType): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca: bool, path_length: int | None) -> None: + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if path_length is not None and ( + not isinstance(path_length, int) or path_length < 0 + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + @property + def ca(self) -> bool: + return self._ca + + @property + def path_length(self) -> int | None: + return self._path_length + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __hash__(self) -> int: + return hash((self.ca, self.path_length)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DeltaCRLIndicator(ExtensionType): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + @property + def crl_number(self) -> int: + return self._crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLDistributionPoints(ExtensionType): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__( + self, distribution_points: typing.Iterable[DistributionPoint] + ) 
-> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class FreshestCRL(ExtensionType): + oid = ExtensionOID.FRESHEST_CRL + + def __init__( + self, distribution_points: typing.Iterable[DistributionPoint] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DistributionPoint: + def __init__( + self, + full_name: typing.Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + reasons: frozenset[ReasonFlags] | None, + crl_issuer: typing.Iterable[GeneralName] | None, + ) -> None: + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + if not full_name and not relative_name and not crl_issuer: + raise ValueError( + "Either full_name, relative_name or crl_issuer must be " + "provided." 
+ ) + + if full_name is not None: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer is not None: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and ( + not isinstance(reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in reasons) + ): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons + or ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.reasons == other.reasons + and self.crl_issuer == other.crl_issuer + ) + + def __hash__(self) -> int: + if self.full_name is not None: + fn: tuple[GeneralName, ...] | None = tuple(self.full_name) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def reasons(self) -> frozenset[ReasonFlags] | None: + return self._reasons + + @property + def crl_issuer(self) -> list[GeneralName] | None: + return self._crl_issuer + + +class ReasonFlags(utils.Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +# These are distribution point bit string mappings. Not to be confused with +# CRLReason reason flags bit string mappings. 
+# ReasonFlags ::= BIT STRING { +# unused (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# privilegeWithdrawn (7), +# aACompromise (8) } +_REASON_BIT_MAPPING = { + 1: ReasonFlags.key_compromise, + 2: ReasonFlags.ca_compromise, + 3: ReasonFlags.affiliation_changed, + 4: ReasonFlags.superseded, + 5: ReasonFlags.cessation_of_operation, + 6: ReasonFlags.certificate_hold, + 7: ReasonFlags.privilege_withdrawn, + 8: ReasonFlags.aa_compromise, +} + +_CRLREASONFLAGS = { + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.privilege_withdrawn: 7, + ReasonFlags.aa_compromise: 8, +} + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + ReasonFlags.unspecified: 0, + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.remove_from_crl: 8, + ReasonFlags.privilege_withdrawn: 9, + ReasonFlags.aa_compromise: 10, +} + + +class PolicyConstraints(ExtensionType): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__( + self, + require_explicit_policy: int | None, + inhibit_policy_mapping: int | None, + ) -> None: + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, int + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, int + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy + and self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __hash__(self) -> int: + return hash( + (self.require_explicit_policy, self.inhibit_policy_mapping) + ) + + @property + def require_explicit_policy(self) -> int | None: + return self._require_explicit_policy + + @property + def inhibit_policy_mapping(self) -> int | None: + return self._inhibit_policy_mapping + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificatePolicies(ExtensionType): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies: typing.Iterable[PolicyInformation]) -> None: + policies = list(policies) + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + __len__, __iter__, 
__getitem__ = _make_sequence_methods("_policies") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __hash__(self) -> int: + return hash(tuple(self._policies)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PolicyInformation: + def __init__( + self, + policy_identifier: ObjectIdentifier, + policy_qualifiers: typing.Iterable[str | UserNotice] | None, + ) -> None: + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + + if policy_qualifiers is not None: + policy_qualifiers = list(policy_qualifiers) + if not all( + isinstance(x, (str, UserNotice)) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or " + "UserNotice objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier + and self.policy_qualifiers == other.policy_qualifiers + ) + + def __hash__(self) -> int: + if self.policy_qualifiers is not None: + pq: tuple[str | UserNotice, ...] | None = tuple( + self.policy_qualifiers + ) + else: + pq = None + + return hash((self.policy_identifier, pq)) + + @property + def policy_identifier(self) -> ObjectIdentifier: + return self._policy_identifier + + @property + def policy_qualifiers( + self, + ) -> list[str | UserNotice] | None: + return self._policy_qualifiers + + +class UserNotice: + def __init__( + self, + notice_reference: NoticeReference | None, + explicit_text: str | None, + ) -> None: + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference + and self.explicit_text == other.explicit_text + ) + + def __hash__(self) -> int: + return hash((self.notice_reference, self.explicit_text)) + + @property + def notice_reference(self) -> NoticeReference | None: + return self._notice_reference + + @property + def explicit_text(self) -> str | None: + return self._explicit_text + + +class NoticeReference: + def __init__( + self, + organization: str | None, + notice_numbers: typing.Iterable[int], + ) -> None: + self._organization = organization + notice_numbers = list(notice_numbers) + if not all(isinstance(x, int) for x in notice_numbers): + raise TypeError("notice_numbers must be a list of integers") + + self._notice_numbers = notice_numbers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization + and self.notice_numbers == other.notice_numbers + ) + + def __hash__(self) -> int: + return hash((self.organization, tuple(self.notice_numbers))) + + @property + def organization(self) -> str | None: + return self._organization 
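+
+    # A construction sketch tying NoticeReference, UserNotice,
+    # PolicyInformation and CertificatePolicies together (illustrative only;
+    # the OID shown is the CA/Browser Forum domain-validated policy
+    # identifier):
+    #
+    #     ref = NoticeReference(organization="Example CA", notice_numbers=[1])
+    #     notice = UserNotice(notice_reference=ref, explicit_text="Example")
+    #     info = PolicyInformation(ObjectIdentifier("2.23.140.1.2.1"), [notice])
+    #     policies = CertificatePolicies([info])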
+
+    @property
+    def notice_numbers(self) -> list[int]:
+        return self._notice_numbers
+
+
+class ExtendedKeyUsage(ExtensionType):
+    oid = ExtensionOID.EXTENDED_KEY_USAGE
+
+    def __init__(self, usages: typing.Iterable[ObjectIdentifier]) -> None:
+        usages = list(usages)
+        if not all(isinstance(x, ObjectIdentifier) for x in usages):
+            raise TypeError(
+                "Every item in the usages list must be an ObjectIdentifier"
+            )
+
+        self._usages = usages
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
+
+    def __repr__(self) -> str:
+        return f"<ExtendedKeyUsage({self._usages})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, ExtendedKeyUsage):
+            return NotImplemented
+
+        return self._usages == other._usages
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._usages))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPNoCheck(ExtensionType):
+    oid = ExtensionOID.OCSP_NO_CHECK
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPNoCheck):
+            return NotImplemented
+
+        return True
+
+    def __hash__(self) -> int:
+        return hash(OCSPNoCheck)
+
+    def __repr__(self) -> str:
+        return "<OCSPNoCheck()>"
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PrecertPoison(ExtensionType):
+    oid = ExtensionOID.PRECERT_POISON
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrecertPoison):
+            return NotImplemented
+
+        return True
+
+    def __hash__(self) -> int:
+        return hash(PrecertPoison)
+
+    def __repr__(self) -> str:
+        return "<PrecertPoison()>"
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class TLSFeature(ExtensionType):
+    oid = ExtensionOID.TLS_FEATURE
+
+    def __init__(self, features: typing.Iterable[TLSFeatureType]) -> None:
+        features = list(features)
+        if (
+            not all(isinstance(x, TLSFeatureType) for x in features)
+            or len(features) == 0
+        ):
+            raise TypeError(
+                "features must be a list of elements from the TLSFeatureType "
+                "enum"
+            )
+
+        self._features = features
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
+
+    def __repr__(self) -> str:
+        return f"<TLSFeature(features={self._features})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TLSFeature):
+            return NotImplemented
+
+        return self._features == other._features
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._features))
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class TLSFeatureType(utils.Enum):
+    # status_request is defined in RFC 6066 and is used for what is commonly
+    # called OCSP Must-Staple when present in the TLS Feature extension in an
+    # X.509 certificate.
+    status_request = 5
+    # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
+    # responses to be provided. It is not currently in use by clients or
+    # servers.
+ status_request_v2 = 17 + + +_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} + + +class InhibitAnyPolicy(ExtensionType): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs: int) -> None: + if not isinstance(skip_certs, int): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __hash__(self) -> int: + return hash(self.skip_certs) + + @property + def skip_certs(self) -> int: + return self._skip_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class KeyUsage(ExtensionType): + oid = ExtensionOID.KEY_USAGE + + def __init__( + self, + digital_signature: bool, + content_commitment: bool, + key_encipherment: bool, + data_encipherment: bool, + key_agreement: bool, + key_cert_sign: bool, + crl_sign: bool, + encipher_only: bool, + decipher_only: bool, + ) -> None: + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + @property + def digital_signature(self) -> bool: + return self._digital_signature + + @property + def content_commitment(self) -> bool: + return self._content_commitment + + @property + def key_encipherment(self) -> bool: + return self._key_encipherment + + @property + def data_encipherment(self) -> bool: + return self._data_encipherment + + @property + def key_agreement(self) -> bool: + return self._key_agreement + + @property + def key_cert_sign(self) -> bool: + return self._key_cert_sign + + @property + def crl_sign(self) -> bool: + return self._crl_sign + + @property + def encipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self) -> str: + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + # Users found None confusing because even though encipher/decipher + # have no meaning unless key_agreement is true, to construct an + # instance of the class you still need to pass False. 
+ encipher_only = False + decipher_only = False + + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature + and self.content_commitment == other.content_commitment + and self.key_encipherment == other.key_encipherment + and self.data_encipherment == other.data_encipherment + and self.key_agreement == other.key_agreement + and self.key_cert_sign == other.key_cert_sign + and self.crl_sign == other.crl_sign + and self._encipher_only == other._encipher_only + and self._decipher_only == other._decipher_only + ) + + def __hash__(self) -> int: + return hash( + ( + self.digital_signature, + self.content_commitment, + self.key_encipherment, + self.data_encipherment, + self.key_agreement, + self.key_cert_sign, + self.crl_sign, + self._encipher_only, + self._decipher_only, + ) + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class NameConstraints(ExtensionType): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__( + self, + permitted_subtrees: typing.Iterable[GeneralName] | None, + excluded_subtrees: typing.Iterable[GeneralName] | None, + ) -> None: + if permitted_subtrees is not None: + permitted_subtrees = list(permitted_subtrees) + if not permitted_subtrees: + raise ValueError( + "permitted_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in permitted_subtrees): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(permitted_subtrees) + + if excluded_subtrees is not None: + excluded_subtrees = list(excluded_subtrees) + if not excluded_subtrees: + raise ValueError( + "excluded_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in excluded_subtrees): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(excluded_subtrees) + + if permitted_subtrees is None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees + and self.permitted_subtrees == other.permitted_subtrees + ) + + def _validate_tree(self, tree: typing.Iterable[GeneralName]) -> None: + self._validate_ip_name(tree) + self._validate_dns_name(tree) + + def _validate_ip_name(self, tree: typing.Iterable[GeneralName]) -> None: + if any( + isinstance(name, IPAddress) + and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) + for name in tree + ): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def _validate_dns_name(self, tree: typing.Iterable[GeneralName]) -> None: + if any( + isinstance(name, DNSName) and "*" in name.value for name in tree + ): + raise ValueError( + "DNSName name constraints must not contain the '*' wildcard" + " character" + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __hash__(self) -> int: + if self.permitted_subtrees is not None: + ps: tuple[GeneralName, ...] 
| None = tuple(self.permitted_subtrees) + else: + ps = None + + if self.excluded_subtrees is not None: + es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees) + else: + es = None + + return hash((ps, es)) + + @property + def permitted_subtrees( + self, + ) -> list[GeneralName] | None: + return self._permitted_subtrees + + @property + def excluded_subtrees( + self, + ) -> list[GeneralName] | None: + return self._excluded_subtrees + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class Extension(typing.Generic[ExtensionTypeVar]): + def __init__( + self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def critical(self) -> bool: + return self._critical + + @property + def value(self) -> ExtensionTypeVar: + return self._value + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid + and self.critical == other.critical + and self.value == other.value + ) + + def __hash__(self) -> int: + return hash((self.oid, self.critical, self.value)) + + +class GeneralNames: + def __init__(self, general_names: typing.Iterable[GeneralName]) -> None: + general_names = list(general_names) + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. 
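+        # For example: if this GeneralNames holds DNSName("a.example") and an
+        # OtherName, get_values_for_type(DNSName) returns ["a.example"], while
+        # get_values_for_type(OtherName) returns the OtherName instances
+        # themselves (both their type_id and value are significant).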
+ objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + return [i.value for i in objs] + return list(objs) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(tuple(self._general_names)) + + +class SubjectAlternativeName(ExtensionType): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names: typing.Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class IssuerAlternativeName(ExtensionType): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names: typing.Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... 
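+
+    # A usage sketch (illustrative only; ``cert`` is assumed to be an
+    # x509.Certificate):
+    #
+    #     ian = cert.extensions.get_extension_for_class(
+    #         IssuerAlternativeName
+    #     ).value
+    #     uris = ian.get_values_for_type(UniformResourceIdentifier)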
+ + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificateIssuer(ExtensionType): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names: typing.Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificateIssuer): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLReason(ExtensionType): + oid = CRLEntryExtensionOID.CRL_REASON + + def __init__(self, reason: ReasonFlags) -> None: + if not isinstance(reason, ReasonFlags): + raise TypeError("reason must be an element from ReasonFlags") + + self._reason = reason + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLReason): + return NotImplemented + + return self.reason == other.reason + + def __hash__(self) -> int: + return hash(self.reason) + + @property + def reason(self) -> ReasonFlags: + return self._reason + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class InvalidityDate(ExtensionType): + oid = CRLEntryExtensionOID.INVALIDITY_DATE + + def __init__(self, invalidity_date: datetime.datetime) -> None: + if not isinstance(invalidity_date, datetime.datetime): + raise TypeError("invalidity_date must be a datetime.datetime") + + self._invalidity_date = invalidity_date + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, InvalidityDate): + return NotImplemented + + return 
self.invalidity_date == other.invalidity_date + + def __hash__(self) -> int: + return hash(self.invalidity_date) + + @property + def invalidity_date(self) -> datetime.datetime: + return self._invalidity_date + + @property + def invalidity_date_utc(self) -> datetime.datetime: + if self._invalidity_date.tzinfo is None: + return self._invalidity_date.replace(tzinfo=datetime.timezone.utc) + else: + return self._invalidity_date.astimezone(tz=datetime.timezone.utc) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrecertificateSignedCertificateTimestamps(ExtensionType): + oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS + + def __init__( + self, + signed_certificate_timestamps: typing.Iterable[ + SignedCertificateTimestamp + ], + ) -> None: + signed_certificate_timestamps = list(signed_certificate_timestamps) + if not all( + isinstance(sct, SignedCertificateTimestamp) + for sct in signed_certificate_timestamps + ): + raise TypeError( + "Every item in the signed_certificate_timestamps list must be " + "a SignedCertificateTimestamp" + ) + self._signed_certificate_timestamps = signed_certificate_timestamps + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) + + def __repr__(self) -> str: + return f"<PrecertificateSignedCertificateTimestamps({list(self)})>" + + def __hash__(self) -> int: + return hash(tuple(self._signed_certificate_timestamps)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrecertificateSignedCertificateTimestamps): + return NotImplemented + + return ( + self._signed_certificate_timestamps + == other._signed_certificate_timestamps + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SignedCertificateTimestamps(ExtensionType): + oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS + + def __init__( + self, + signed_certificate_timestamps: typing.Iterable[ + SignedCertificateTimestamp + ], + ) -> None: + signed_certificate_timestamps = list(signed_certificate_timestamps) + if not all( + isinstance(sct, SignedCertificateTimestamp) + for sct in signed_certificate_timestamps + ): + raise TypeError( + "Every item in the signed_certificate_timestamps list must be " + "a SignedCertificateTimestamp" + ) + self._signed_certificate_timestamps = signed_certificate_timestamps + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) + + def __repr__(self) -> str: + return f"<SignedCertificateTimestamps({list(self)})>" + + def __hash__(self) -> int: + return hash(tuple(self._signed_certificate_timestamps)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SignedCertificateTimestamps): + return NotImplemented + + return ( + self._signed_certificate_timestamps + == other._signed_certificate_timestamps + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPNonce(ExtensionType): + oid = OCSPExtensionOID.NONCE + + def __init__(self, nonce: bytes) -> None: + if not isinstance(nonce, bytes): + raise TypeError("nonce must be bytes") + + self._nonce = nonce + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPNonce): + return NotImplemented + + return self.nonce == other.nonce + + def __hash__(self) -> int: + return hash(self.nonce) + + def __repr__(self) -> str: + return f"<OCSPNonce(nonce={self.nonce!r})>" + + @property + def nonce(self) -> bytes: + return self._nonce + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPAcceptableResponses(ExtensionType): + oid = 
OCSPExtensionOID.ACCEPTABLE_RESPONSES + + def __init__(self, responses: typing.Iterable[ObjectIdentifier]) -> None: + responses = list(responses) + if any(not isinstance(r, ObjectIdentifier) for r in responses): + raise TypeError("All responses must be ObjectIdentifiers") + + self._responses = responses + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPAcceptableResponses): + return NotImplemented + + return self._responses == other._responses + + def __hash__(self) -> int: + return hash(tuple(self._responses)) + + def __repr__(self) -> str: + return f"<OCSPAcceptableResponses(responses={self._responses})>" + + def __iter__(self) -> typing.Iterator[ObjectIdentifier]: + return iter(self._responses) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class IssuingDistributionPoint(ExtensionType): + oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT + + def __init__( + self, + full_name: typing.Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + only_contains_user_certs: bool, + only_contains_ca_certs: bool, + only_some_reasons: frozenset[ReasonFlags] | None, + indirect_crl: bool, + only_contains_attribute_certs: bool, + ) -> None: + if full_name is not None: + full_name = list(full_name) + + if only_some_reasons and ( + not isinstance(only_some_reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in only_some_reasons) + ): + raise TypeError( + "only_some_reasons must be None or frozenset of ReasonFlags" + ) + + if only_some_reasons and ( + ReasonFlags.unspecified in only_some_reasons + or ReasonFlags.remove_from_crl in only_some_reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in an " + "IssuingDistributionPoint" + ) + + if not ( + isinstance(only_contains_user_certs, bool) + and isinstance(only_contains_ca_certs, bool) + and isinstance(indirect_crl, bool) + and isinstance(only_contains_attribute_certs, bool) + ): + raise TypeError( + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + crl_constraints = [ + only_contains_user_certs, + only_contains_ca_certs, + indirect_crl, + only_contains_attribute_certs, + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, only_contains_attribute_certs" + ) + + if not any( + [ + only_contains_user_certs, + only_contains_ca_certs, + indirect_crl, + only_contains_attribute_certs, + full_name, + relative_name, + only_some_reasons, + ] + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value.
+ ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self) -> str: + return ( + f"<IssuingDistributionPoint(full_name={self.full_name}, " + f"relative_name={self.relative_name}, " + f"only_contains_user_certs={self.only_contains_user_certs}, " + f"only_contains_ca_certs={self.only_contains_ca_certs}, " + f"only_some_reasons={self.only_some_reasons}, " + f"indirect_crl={self.indirect_crl}, " + f"only_contains_attribute_certs=" + f"{self.only_contains_attribute_certs})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.only_contains_user_certs == other.only_contains_user_certs + and self.only_contains_ca_certs == other.only_contains_ca_certs + and self.only_some_reasons == other.only_some_reasons + and self.indirect_crl == other.indirect_crl + and self.only_contains_attribute_certs + == other.only_contains_attribute_certs + ) + + def __hash__(self) -> int: + return hash( + ( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + ) + ) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def only_contains_user_certs(self) -> bool: + return self._only_contains_user_certs + + @property + def only_contains_ca_certs(self) -> bool: + return self._only_contains_ca_certs + + @property + def only_some_reasons( + self, + ) -> frozenset[ReasonFlags] | None: + return self._only_some_reasons + + @property + def indirect_crl(self) -> bool: + return self._indirect_crl + + @property + def only_contains_attribute_certs(self) -> bool: + return self._only_contains_attribute_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class MSCertificateTemplate(ExtensionType): + oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE + + def __init__( + self, + template_id: ObjectIdentifier, + major_version: int | None, + minor_version: int | None, + ) -> None: + if not isinstance(template_id, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._template_id = template_id + if ( + major_version is not None and not isinstance(major_version, int) + ) or ( + minor_version is not None and not isinstance(minor_version, int) + ): + raise TypeError( + "major_version and minor_version must be integers or None" + ) + self._major_version = major_version + self._minor_version = minor_version + + @property + def template_id(self) -> ObjectIdentifier: + return self._template_id + + @property + def major_version(self) -> int | None: + return self._major_version + + @property + def minor_version(self) -> int | None: + return self._minor_version + + def __repr__(self) -> str: + return ( + f"<MSCertificateTemplate(template_id={self.template_id}, " + f"major_version={self.major_version}, " + f"minor_version={self.minor_version})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, MSCertificateTemplate): + return NotImplemented + + return ( + self.template_id == other.template_id + and self.major_version == other.major_version + and self.minor_version == other.minor_version + ) + + def __hash__(self) -> int: + return hash((self.template_id, self.major_version, self.minor_version)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class UnrecognizedExtension(ExtensionType): + def __init__(self, oid: ObjectIdentifier, value: bytes) -> None: + if not isinstance(oid, 
ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._oid = oid + self._value = value + + @property + def oid(self) -> ObjectIdentifier: # type: ignore[override] + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return ( + f"<UnrecognizedExtension(oid={self.oid}, " + f"value={self.value!r})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnrecognizedExtension): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def public_bytes(self) -> bytes: + return self.value diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/general_name.py b/myenv/lib/python3.12/site-packages/cryptography/x509/general_name.py new file mode 100644 index 0000000..672f287 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/general_name.py @@ -0,0 +1,281 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import ipaddress +import typing +from email.utils import parseaddr + +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + +_IPAddressTypes = typing.Union[ + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network, +] + + +class UnsupportedGeneralNameType(Exception): + pass + + +class GeneralName(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def value(self) -> typing.Any: + """ + Return the value of the object + """ + + +class RFC822Name(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "RFC822Name values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + name, address = parseaddr(value) + if name or not address: + # parseaddr has found a name (e.g. Name <email>) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> RFC822Name: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"<RFC822Name(value={self.value!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DNSName(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "DNSName values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna."
+ ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> DNSName: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"<DNSName(value={self.value!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class UniformResourceIdentifier(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "URI values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> UniformResourceIdentifier: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"<UniformResourceIdentifier(value={self.value!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DirectoryName(GeneralName): + def __init__(self, value: Name) -> None: + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + @property + def value(self) -> Name: + return self._value + + def __repr__(self) -> str: + return f"<DirectoryName(value={self.value})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class RegisteredID(GeneralName): + def __init__(self, value: ObjectIdentifier) -> None: + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + @property + def value(self) -> ObjectIdentifier: + return self._value + + def __repr__(self) -> str: + return f"<RegisteredID(value={self.value})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class IPAddress(GeneralName): + def __init__(self, value: _IPAddressTypes) -> None: + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network, + ), + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + @property + def value(self) -> _IPAddressTypes: + return self._value + + def _packed(self) -> bytes: + if isinstance( + self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address) + ): + return self.value.packed + else: + return ( + self.value.network_address.packed + self.value.netmask.packed + ) + + def __repr__(self) -> str: + return f"<IPAddress(value={self.value})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class OtherName(GeneralName): + def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: + if 
not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + @property + def type_id(self) -> ObjectIdentifier: + return self._type_id + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"<OtherName(type_id={self.type_id}, value={self.value!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __hash__(self) -> int: + return hash((self.type_id, self.value)) diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/name.py b/myenv/lib/python3.12/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..1b6b89d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/name.py @@ -0,0 +1,465 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import binascii +import re +import sys +import typing +import warnings + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.oid import NameOID, ObjectIdentifier + + +class _ASN1Type(utils.Enum): + BitString = 3 + OctetString = 4 + UTF8String = 12 + NumericString = 18 + PrintableString = 19 + T61String = 20 + IA5String = 22 + UTCTime = 23 + GeneralizedTime = 24 + VisibleString = 26 + UniversalString = 28 + BMPString = 30 + + +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} +_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { + NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, + NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, + NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, + NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, +} + +# Type alias +_OidNameMap = typing.Mapping[ObjectIdentifier, str] +_NameOidMap = typing.Mapping[str, ObjectIdentifier] + +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME: _OidNameMap = { + NameOID.COMMON_NAME: "CN", + NameOID.LOCALITY_NAME: "L", + NameOID.STATE_OR_PROVINCE_NAME: "ST", + NameOID.ORGANIZATION_NAME: "O", + NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", + NameOID.COUNTRY_NAME: "C", + NameOID.STREET_ADDRESS: "STREET", + NameOID.DOMAIN_COMPONENT: "DC", + NameOID.USER_ID: "UID", +} +_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} + +_NAMEOID_LENGTH_LIMIT = { + NameOID.COUNTRY_NAME: (2, 2), + NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), + NameOID.COMMON_NAME: (1, 64), +} + + +def _escape_dn_value(val: str | bytes) -> str: + """Escape special characters in RFC4514 Distinguished Name value.""" + + if not val: + return "" + + # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character + # followed by the hexadecimal encoding of the octets.
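+    # Editor's illustration (not part of the upstream source), tracing the
+    # branches below:
+    #
+    #     >>> _escape_dn_value(b"\x04\x02")
+    #     '#0402'
+    #     >>> _escape_dn_value("a+b,c")
+    #     'a\\+b\\,c'
+    #     >>> _escape_dn_value(" leading space")
+    #     '\\ leading space'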
+ if isinstance(val, bytes): + return "#" + binascii.hexlify(val).decode("utf8") + + # See https://tools.ietf.org/html/rfc4514#section-2.4 + val = val.replace("\\", "\\\\") + val = val.replace('"', '\\"') + val = val.replace("+", "\\+") + val = val.replace(",", "\\,") + val = val.replace(";", "\\;") + val = val.replace("<", "\\<") + val = val.replace(">", "\\>") + val = val.replace("\0", "\\00") + + if val[0] in ("#", " "): + val = "\\" + val + if val[-1] == " ": + val = val[:-1] + "\\ " + + return val + + +def _unescape_dn_value(val: str) -> str: + if not val: + return "" + + # See https://tools.ietf.org/html/rfc4514#section-3 + + # special = escaped / SPACE / SHARP / EQUALS + # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE + def sub(m): + val = m.group(1) + # Regular escape + if len(val) == 1: + return val + # Hex-value escape + return chr(int(val, 16)) + + return _RFC4514NameParser._PAIR_RE.sub(sub, val) + + +class NameAttribute: + def __init__( + self, + oid: ObjectIdentifier, + value: str | bytes, + _type: _ASN1Type | None = None, + *, + _validate: bool = True, + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + if _type == _ASN1Type.BitString: + if oid != NameOID.X500_UNIQUE_IDENTIFIER: + raise TypeError( + "oid must be X500_UNIQUE_IDENTIFIER for BitString type." + ) + if not isinstance(value, bytes): + raise TypeError("value must be bytes for BitString") + else: + if not isinstance(value, str): + raise TypeError("value argument must be a str") + + length_limits = _NAMEOID_LENGTH_LIMIT.get(oid) + if length_limits is not None: + min_length, max_length = length_limits + assert isinstance(value, str) + c_len = len(value.encode("utf8")) + if c_len < min_length or c_len > max_length: + msg = ( + f"Attribute's length must be >= {min_length} and " + f"<= {max_length}, but it was {c_len}" + ) + if _validate is True: + raise ValueError(msg) + else: + warnings.warn(msg, stacklevel=2) + + # The appropriate ASN1 string type varies by OID and is defined across + # multiple RFCs including 2459, 3280, and 5280. In general UTF8String + # is preferred (2459), but 3280 and 5280 specify several OIDs with + # alternate types. This means when we see the sentinel value we need + # to look up whether the OID has a non-UTF8 type. If it does, set it + # to that. Otherwise, UTF8! + if _type is None: + _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) + + if not isinstance(_type, _ASN1Type): + raise TypeError("_type must be from the _ASN1Type enum") + + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> str | bytes: + return self._value + + @property + def rfc4514_attribute_name(self) -> str: + """ + The short attribute name (for example "CN") if available, + otherwise the OID dotted string. + """ + return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + + Use short attribute name if available, otherwise fall back to OID + dotted string.
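+
+        Editor's illustration (not part of the upstream docstring):
+
+            >>> from cryptography.x509 import NameAttribute
+            >>> from cryptography.x509.oid import NameOID
+            >>> NameAttribute(NameOID.COMMON_NAME, "Example CA").rfc4514_string()
+            'CN=Example CA'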
+ """ + attr_name = ( + attr_name_overrides.get(self.oid) if attr_name_overrides else None + ) + if attr_name is None: + attr_name = self.rfc4514_attribute_name + + return f"{attr_name}={_escape_dn_value(self.value)}" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameAttribute): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def __repr__(self) -> str: + return f"" + + +class RelativeDistinguishedName: + def __init__(self, attributes: typing.Iterable[NameAttribute]): + attributes = list(attributes) + if not attributes: + raise ValueError("a relative distinguished name cannot be empty") + if not all(isinstance(x, NameAttribute) for x in attributes): + raise TypeError("attributes must be an iterable of NameAttribute") + + # Keep list and frozenset to preserve attribute order where it matters + self._attributes = attributes + self._attribute_set = frozenset(attributes) + + if len(self._attribute_set) != len(attributes): + raise ValueError("duplicate attributes are not allowed") + + def get_attributes_for_oid( + self, oid: ObjectIdentifier + ) -> list[NameAttribute]: + return [i for i in self if i.oid == oid] + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return "+".join( + attr.rfc4514_string(attr_name_overrides) + for attr in self._attributes + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RelativeDistinguishedName): + return NotImplemented + + return self._attribute_set == other._attribute_set + + def __hash__(self) -> int: + return hash(self._attribute_set) + + def __iter__(self) -> typing.Iterator[NameAttribute]: + return iter(self._attributes) + + def __len__(self) -> int: + return len(self._attributes) + + def __repr__(self) -> str: + return f"" + + +class Name: + @typing.overload + def __init__(self, attributes: typing.Iterable[NameAttribute]) -> None: ... + + @typing.overload + def __init__( + self, attributes: typing.Iterable[RelativeDistinguishedName] + ) -> None: ... + + def __init__( + self, + attributes: typing.Iterable[NameAttribute | RelativeDistinguishedName], + ) -> None: + attributes = list(attributes) + if all(isinstance(x, NameAttribute) for x in attributes): + self._attributes = [ + RelativeDistinguishedName([typing.cast(NameAttribute, x)]) + for x in attributes + ] + elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): + self._attributes = typing.cast( + typing.List[RelativeDistinguishedName], attributes + ) + else: + raise TypeError( + "attributes must be a list of NameAttribute" + " or a list RelativeDistinguishedName" + ) + + @classmethod + def from_rfc4514_string( + cls, + data: str, + attr_name_overrides: _NameOidMap | None = None, + ) -> Name: + return _RFC4514NameParser(data, attr_name_overrides or {}).parse() + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. 
According to RFC4514 section 2.1 the + RDNSequence must be reversed when converting to string representation. + """ + return ",".join( + attr.rfc4514_string(attr_name_overrides) + for attr in reversed(self._attributes) + ) + + def get_attributes_for_oid( + self, oid: ObjectIdentifier + ) -> list[NameAttribute]: + return [i for i in self if i.oid == oid] + + @property + def rdns(self) -> list[RelativeDistinguishedName]: + return self._attributes + + def public_bytes(self, backend: typing.Any = None) -> bytes: + return rust_x509.encode_name_bytes(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __hash__(self) -> int: + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! + return hash(tuple(self._attributes)) + + def __iter__(self) -> typing.Iterator[NameAttribute]: + for rdn in self._attributes: + yield from rdn + + def __len__(self) -> int: + return sum(len(rdn) for rdn in self._attributes) + + def __repr__(self) -> str: + rdns = ",".join(attr.rfc4514_string() for attr in self._attributes) + return f"<Name({rdns})>" + + +class _RFC4514NameParser: + _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+") + _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*") + + _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})" + _PAIR_RE = re.compile(_PAIR) + _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]" + _LEADCHAR = rf"{_LUTF1}|{_UTFMB}" + _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}" + _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}" + _STRING_RE = re.compile( + rf""" + ( + ({_LEADCHAR}|{_PAIR}) + ( + ({_STRINGCHAR}|{_PAIR})* + ({_TRAILCHAR}|{_PAIR}) + )? + )? + """, + re.VERBOSE, + ) + _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") + + def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None: + self._data = data + self._idx = 0 + + self._attr_name_overrides = attr_name_overrides + + def _has_data(self) -> bool: + return self._idx < len(self._data) + + def _peek(self) -> str | None: + if self._has_data(): + return self._data[self._idx] + return None + + def _read_char(self, ch: str) -> None: + if self._peek() != ch: + raise ValueError + self._idx += 1 + + def _read_re(self, pat) -> str: + match = pat.match(self._data, pos=self._idx) + if match is None: + raise ValueError + val = match.group() + self._idx += len(val) + return val + + def parse(self) -> Name: + """ + Parses the `data` string and converts it to a Name. + + According to RFC4514 section 2.1 the RDNSequence must be + reversed when converting to string representation. So, when + we parse it, we need to reverse again to get the RDNs in the + correct order.
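+
+        Editor's illustration (not part of the upstream docstring):
+
+            >>> from cryptography.x509 import Name
+            >>> Name.from_rfc4514_string("CN=foo,O=Org,C=US").rfc4514_string()
+            'CN=foo,O=Org,C=US'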
+ """ + + if not self._has_data(): + return Name([]) + + rdns = [self._parse_rdn()] + + while self._has_data(): + self._read_char(",") + rdns.append(self._parse_rdn()) + + return Name(reversed(rdns)) + + def _parse_rdn(self) -> RelativeDistinguishedName: + nas = [self._parse_na()] + while self._peek() == "+": + self._read_char("+") + nas.append(self._parse_na()) + + return RelativeDistinguishedName(nas) + + def _parse_na(self) -> NameAttribute: + try: + oid_value = self._read_re(self._OID_RE) + except ValueError: + name = self._read_re(self._DESCR_RE) + oid = self._attr_name_overrides.get( + name, _NAME_TO_NAMEOID.get(name) + ) + if oid is None: + raise ValueError + else: + oid = ObjectIdentifier(oid_value) + + self._read_char("=") + if self._peek() == "#": + value = self._read_re(self._HEXSTRING_RE) + value = binascii.unhexlify(value[1:]).decode() + else: + raw_value = self._read_re(self._STRING_RE) + value = _unescape_dn_value(raw_value) + + return NameAttribute(oid, value) diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/ocsp.py b/myenv/lib/python3.12/site-packages/cryptography/x509/ocsp.py new file mode 100644 index 0000000..dbb475d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/ocsp.py @@ -0,0 +1,678 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import typing + +from cryptography import utils, x509 +from cryptography.hazmat.bindings._rust import ocsp +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, +) +from cryptography.x509.base import ( + _EARLIEST_UTC_TIME, + _convert_to_naive_utc_time, + _reject_duplicate_extension, +) + + +class OCSPResponderEncoding(utils.Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(utils.Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_ALLOWED_HASHES = ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +) + + +def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(utils.Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +class _SingleResponse: + def __init__( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ): + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if next_update is not None and not isinstance( + next_update, datetime.datetime + ): + raise TypeError("next_update must be a datetime object or None") + + self._cert = cert + self._issuer = issuer + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + 
raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + revocation_time = _convert_to_naive_utc_time(revocation_time) + if revocation_time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation_time must be on or after" + " 1950 January 1." + ) + + if revocation_reason is not None and not isinstance( + revocation_reason, x509.ReasonFlags + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +class OCSPRequest(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @property + @abc.abstractmethod + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @property + @abc.abstractmethod + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the request to DER + """ + + @property + @abc.abstractmethod + def extensions(self) -> x509.Extensions: + """ + The list of request extensions. Not single request extensions. + """ + + +class OCSPSingleResponse(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def certificate_status(self) -> OCSPCertStatus: + """ + The status of the certificate (an element from the OCSPCertStatus enum) + """ + + @property + @abc.abstractmethod + def revocation_time(self) -> datetime.datetime | None: + """ + The date of when the certificate was revoked or None if not + revoked. + """ + + @property + @abc.abstractmethod + def revocation_time_utc(self) -> datetime.datetime | None: + """ + The date of when the certificate was revoked or None if not + revoked. Represented as a non-naive UTC datetime. + """ + + @property + @abc.abstractmethod + def revocation_reason(self) -> x509.ReasonFlags | None: + """ + The reason the certificate was revoked or None if not specified or + not revoked. + """ + + @property + @abc.abstractmethod + def this_update(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct + """ + + @property + @abc.abstractmethod + def this_update_utc(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct. Represented as a non-naive UTC + datetime. + """ + + @property + @abc.abstractmethod + def next_update(self) -> datetime.datetime | None: + """ + The time when newer information will be available + """ + + @property + @abc.abstractmethod + def next_update_utc(self) -> datetime.datetime | None: + """ + The time when newer information will be available. Represented as a + non-naive UTC datetime. 
+ """ + + @property + @abc.abstractmethod + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @property + @abc.abstractmethod + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @property + @abc.abstractmethod + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + +class OCSPResponse(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def responses(self) -> typing.Iterator[OCSPSingleResponse]: + """ + An iterator over the individual SINGLERESP structures in the + response + """ + + @property + @abc.abstractmethod + def response_status(self) -> OCSPResponseStatus: + """ + The status of the response. This is a value from the OCSPResponseStatus + enumeration + """ + + @property + @abc.abstractmethod + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: + """ + The ObjectIdentifier of the signature algorithm + """ + + @property + @abc.abstractmethod + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + """ + + @property + @abc.abstractmethod + def signature(self) -> bytes: + """ + The signature bytes + """ + + @property + @abc.abstractmethod + def tbs_response_bytes(self) -> bytes: + """ + The tbsResponseData bytes + """ + + @property + @abc.abstractmethod + def certificates(self) -> list[x509.Certificate]: + """ + A list of certificates used to help build a chain to verify the OCSP + response. This situation occurs when the OCSP responder uses a delegate + certificate. + """ + + @property + @abc.abstractmethod + def responder_key_hash(self) -> bytes | None: + """ + The responder's key hash or None + """ + + @property + @abc.abstractmethod + def responder_name(self) -> x509.Name | None: + """ + The responder's Name or None + """ + + @property + @abc.abstractmethod + def produced_at(self) -> datetime.datetime: + """ + The time the response was produced + """ + + @property + @abc.abstractmethod + def produced_at_utc(self) -> datetime.datetime: + """ + The time the response was produced. Represented as a non-naive UTC + datetime. + """ + + @property + @abc.abstractmethod + def certificate_status(self) -> OCSPCertStatus: + """ + The status of the certificate (an element from the OCSPCertStatus enum) + """ + + @property + @abc.abstractmethod + def revocation_time(self) -> datetime.datetime | None: + """ + The date of when the certificate was revoked or None if not + revoked. + """ + + @property + @abc.abstractmethod + def revocation_time_utc(self) -> datetime.datetime | None: + """ + The date of when the certificate was revoked or None if not + revoked. Represented as a non-naive UTC datetime. + """ + + @property + @abc.abstractmethod + def revocation_reason(self) -> x509.ReasonFlags | None: + """ + The reason the certificate was revoked or None if not specified or + not revoked. + """ + + @property + @abc.abstractmethod + def this_update(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct + """ + + @property + @abc.abstractmethod + def this_update_utc(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct. 
Represented as a non-naive UTC + datetime. + """ + + @property + @abc.abstractmethod + def next_update(self) -> datetime.datetime | None: + """ + The time when newer information will be available + """ + + @property + @abc.abstractmethod + def next_update_utc(self) -> datetime.datetime | None: + """ + The time when newer information will be available. Represented as a + non-naive UTC datetime. + """ + + @property + @abc.abstractmethod + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @property + @abc.abstractmethod + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @property + @abc.abstractmethod + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + @property + @abc.abstractmethod + def extensions(self) -> x509.Extensions: + """ + The list of response extensions. Not single response extensions. + """ + + @property + @abc.abstractmethod + def single_extensions(self) -> x509.Extensions: + """ + The list of single response extensions. Not response extensions. + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the response to DER + """ + + +OCSPRequest.register(ocsp.OCSPRequest) +OCSPResponse.register(ocsp.OCSPResponse) +OCSPSingleResponse.register(ocsp.OCSPSingleResponse) + + +class OCSPRequestBuilder: + def __init__( + self, + request: tuple[ + x509.Certificate, x509.Certificate, hashes.HashAlgorithm + ] + | None = None, + request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] + | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ) -> None: + self._request = request + self._request_hash = request_hash + self._extensions = extensions + + def add_certificate( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one certificate can be added to a request") + + _verify_algorithm(algorithm) + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder( + (cert, issuer, algorithm), self._request_hash, self._extensions + ) + + def add_certificate_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one certificate can be added to a request") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + _verify_algorithm(algorithm) + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + return OCSPRequestBuilder( + self._request, + (issuer_name_hash, issuer_key_hash, serial_number, algorithm), + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPRequestBuilder: 
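+        # Editor's note (illustration only, not part of the upstream file):
+        # every builder method returns a new OCSPRequestBuilder, so requests
+        # are normally assembled by chaining; cert and issuer_cert below are
+        # placeholder x509.Certificate objects:
+        #
+        #     >>> from cryptography.hazmat.primitives import hashes
+        #     >>> from cryptography.x509 import ocsp
+        #     >>> req = (
+        #     ...     ocsp.OCSPRequestBuilder()
+        #     ...     .add_certificate(cert, issuer_cert, hashes.SHA1())
+        #     ...     .build()
+        #     ... )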
+ if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._request_hash, [*self._extensions, extension] + ) + + def build(self) -> OCSPRequest: + if self._request is None and self._request_hash is None: + raise ValueError("You must add a certificate before building") + + return ocsp.create_ocsp_request(self) + + +class OCSPResponseBuilder: + def __init__( + self, + response: _SingleResponse | None = None, + responder_id: tuple[x509.Certificate, OCSPResponderEncoding] + | None = None, + certs: list[x509.Certificate] | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + singleresp = _SingleResponse( + cert, + issuer, + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def responder_id( + self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate + ) -> OCSPResponseBuilder: + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, + (responder_cert, encoding), + self._certs, + self._extensions, + ) + + def certificates( + self, certs: typing.Iterable[x509.Certificate] + ) -> OCSPResponseBuilder: + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + self._response, + self._responder_id, + certs, + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPResponseBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, + self._responder_id, + self._certs, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: hashes.HashAlgorithm | None, + ) -> OCSPResponse: + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + return ocsp.create_ocsp_response( + 
OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful( + cls, response_status: OCSPResponseStatus + ) -> OCSPResponse: + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return ocsp.create_ocsp_response(response_status, None, None, None) + + +load_der_ocsp_request = ocsp.load_der_ocsp_request +load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/oid.py b/myenv/lib/python3.12/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..d4e409e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/oid.py @@ -0,0 +1,35 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat._oid import ( + AttributeOID, + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + OCSPExtensionOID, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, + SubjectInformationAccessOID, +) + +__all__ = [ + "AttributeOID", + "AuthorityInformationAccessOID", + "CRLEntryExtensionOID", + "CertificatePoliciesOID", + "ExtendedKeyUsageOID", + "ExtensionOID", + "NameOID", + "OCSPExtensionOID", + "ObjectIdentifier", + "PublicKeyAlgorithmOID", + "SignatureAlgorithmOID", + "SubjectInformationAccessOID", +] diff --git a/myenv/lib/python3.12/site-packages/cryptography/x509/verification.py b/myenv/lib/python3.12/site-packages/cryptography/x509/verification.py new file mode 100644 index 0000000..b836506 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/cryptography/x509/verification.py @@ -0,0 +1,28 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.general_name import DNSName, IPAddress + +__all__ = [ + "ClientVerifier", + "PolicyBuilder", + "ServerVerifier", + "Store", + "Subject", + "VerificationError", + "VerifiedClient", +] + +Store = rust_x509.Store +Subject = typing.Union[DNSName, IPAddress] +VerifiedClient = rust_x509.VerifiedClient +ClientVerifier = rust_x509.ClientVerifier +ServerVerifier = rust_x509.ServerVerifier +PolicyBuilder = rust_x509.PolicyBuilder +VerificationError = rust_x509.VerificationError diff --git a/myenv/lib/python3.12/site-packages/dateutil/__init__.py b/myenv/lib/python3.12/site-packages/dateutil/__init__.py new file mode 100644 index 0000000..a2c19c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +import sys + +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] + +def __getattr__(name): + import importlib + + if name in __all__: + return importlib.import_module("." 
+ name, __name__) + raise AttributeError( + "module {!r} has no attribute {!r}".format(__name__, name) + ) + + +def __dir__(): + # __dir__ should include all the lazy-importable modules as well. + return [x for x in globals() if x not in sys.modules] + __all__ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..28a5dc5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc new file mode 100644 index 0000000..6d27fdc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..ffd3885 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc new file mode 100644 index 0000000..7fcc4cd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc new file mode 100644 index 0000000..44577f4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc new file mode 100644 index 0000000..830dd7c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc new file mode 100644 index 0000000..c3238d9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..4e21dee Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/_common.py b/myenv/lib/python3.12/site-packages/dateutil/_common.py new file mode 100644 index 0000000..4eb2659 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules.
+""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/myenv/lib/python3.12/site-packages/dateutil/_version.py b/myenv/lib/python3.12/site-packages/dateutil/_version.py new file mode 100644 index 0000000..ddda980 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/_version.py @@ -0,0 +1,4 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +__version__ = version = '2.9.0.post0' +__version_tuple__ = version_tuple = (2, 9, 0) diff --git a/myenv/lib/python3.12/site-packages/dateutil/easter.py b/myenv/lib/python3.12/site-packages/dateutil/easter.py new file mode 100644 index 0000000..f74d1f7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic Easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different Easter + calculation methods: + + 1. Original calculation in Julian calendar, valid in + dates after 326 AD + 2. Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3. Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. 
+ + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_ + + and + + `The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/__init__.py b/myenv/lib/python3.12/site-packages/dateutil/parser/__init__.py new file mode 100644 index 0000000..d174b0e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice.
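+# Editor's note (illustration only, not part of the upstream file): after the
+# wrapping performed at the bottom of this module, legacy imports of private
+# names keep working but emit a DeprecationWarning, e.g.:
+#
+#     >>> import warnings
+#     >>> from dateutil.parser import _timelex
+#     >>> with warnings.catch_warnings(record=True) as w:
+#     ...     warnings.simplefilter("always")
+#     ...     lexer = _timelex("Sep 25 2003")
+#     >>> issubclass(w[-1].category, DeprecationWarning)
+#     True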
+
+
+def __deprecated_private_func(f):
+    from functools import wraps
+    import warnings
+
+    msg = ('{name} is a private function and may break without warning, '
+           'it will be moved and/or renamed in future versions.')
+    msg = msg.format(name=f.__name__)
+
+    @wraps(f)
+    def deprecated_func(*args, **kwargs):
+        warnings.warn(msg, DeprecationWarning)
+        return f(*args, **kwargs)
+
+    return deprecated_func
+
+def __deprecate_private_class(c):
+    import warnings
+
+    msg = ('{name} is a private class and may break without warning, '
+           'it will be moved and/or renamed in future versions.')
+    msg = msg.format(name=c.__name__)
+
+    class private_class(c):
+        __doc__ = c.__doc__
+
+        def __init__(self, *args, **kwargs):
+            warnings.warn(msg, DeprecationWarning)
+            super(private_class, self).__init__(*args, **kwargs)
+
+    private_class.__name__ = c.__name__
+
+    return private_class
+
+
+from ._parser import _timelex, _resultbase
+from ._parser import _tzparser, _parsetz
+
+_timelex = __deprecate_private_class(_timelex)
+_tzparser = __deprecate_private_class(_tzparser)
+_resultbase = __deprecate_private_class(_resultbase)
+_parsetz = __deprecated_private_func(_parsetz)
diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..4deaa4e
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc
new file mode 100644
index 0000000..46d442b
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc
new file mode 100644
index 0000000..c758daa
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/_parser.py b/myenv/lib/python3.12/site-packages/dateutil/parser/_parser.py
new file mode 100644
index 0000000..37d1663
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/dateutil/parser/_parser.py
@@ -0,0 +1,1613 @@
+# -*- coding: utf-8 -*-
+"""
+This module offers a generic date/time string parser which is able to parse
+most known formats to represent a date and/or time.
+
+This module attempts to be forgiving with regards to unlikely input formats,
+returning a datetime object even for dates which are ambiguous. If an element
+of a date/time stamp is omitted, the following rules are applied:
+
+- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour
+  on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is
+  specified.
+- If a time zone is omitted, a timezone-naive datetime is returned.
+
+If any other elements are missing, they are taken from the
+:class:`datetime.datetime` object passed to the parameter ``default``. If this
+results in a day number exceeding the valid number of days per month, the
+value falls back to the end of the month.
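+
+For example, a bare month name parsed against a default sitting on the
+31st falls back to the shorter month's last day (illustrative doctest):
+
+    >>> from datetime import datetime
+    >>> from dateutil.parser import parse
+    >>> parse("February", default=datetime(2021, 1, 31))
+    datetime.datetime(2021, 2, 28, 0, 0)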
+ +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. 
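+                # e.g. "10:30" is emitted as three tokens ("10", ":",
+                # "30"); "4.30" stays one decimal token, and a multi-dot
+                # run such as "4.30.21" is split back apart after the
+                # loop below.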
+ if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
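+
+    For example, ``01/05/09`` resolves differently under each flag (an
+    illustrative doctest, exercised through the ``parse`` entry point):
+
+        >>> from dateutil.parser import parse
+        >>> parse("01/05/09")
+        datetime.datetime(2009, 1, 5, 0, 0)
+        >>> parse("01/05/09", dayfirst=True)
+        datetime.datetime(2009, 5, 1, 0, 0)
+        >>> parse("01/05/09", yearfirst=True)
+        datetime.datetime(2001, 5, 9, 0, 0)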
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + 
return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? 
+ if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
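+
+        A failure-mode sketch (illustrative; the exact message text is
+        an implementation detail):
+
+            >>> from dateutil.parser import parse, ParserError
+            >>> try:
+            ...     parse("not a date")
+            ... except ParserError as e:
+            ...     print(e)
+            Unknown string format: not a date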
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(str(e) + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
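+            # e.g. for "17:21" this sets hour 17 and leaves value 21 for
+            # the minute/second split below; a fractional "17:21.5"
+            # resolves to minute 21, second 30.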
+ (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? + hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
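+        # (e.g. once "8:00 am" has been seen, a later lone "a" or "am"
+        # in fuzzy text is demoted here to an ordinary, skippable
+        # token.)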
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
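+    #
+    # A hypothetical customization sketch (``BrazilInfo`` is not part of
+    # dateutil), applying the same subclass-to-customize idea through
+    # parserinfo:
+    #
+    #     >>> from dateutil.parser import parser, parserinfo
+    #     >>> class BrazilInfo(parserinfo):
+    #     ...     TZOFFSET = {"BRST": -7200}
+    #     >>> parser(BrazilInfo()).parse("2012-01-19 17:21 BRST").tzname()
+    #     'BRST'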
+ + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + else: + raise TypeError("Offset must be tzinfo subclass, tz string, " + "or int offset.") + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.UTC) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.UTC) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. 
+ + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string formats, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date would + be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
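+
+    Two of the flags above in action (illustrative doctest):
+
+    .. doctest::
+
+        >>> from dateutil.parser import parse
+        >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy=True)
+        datetime.datetime(2047, 1, 1, 8, 21)
+        >>> parse("2012-01-19 17:21:00 CST", ignoretz=True)
+        datetime.datetime(2012, 1, 19, 17, 21)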
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Exception subclass used for any failure to parse a datetime string. + + This is a subclass of :py:exc:`ValueError`, and should be raised any time + earlier versions of ``dateutil`` would have raised ``ValueError``. + + .. versionadded:: 2.8.1 + """ + def __str__(self): + try: + return self.args[0] % self.args[1:] + except (TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + args = ", ".join("'%s'" % arg for arg in self.args) + return "%s(%s)" % (self.__class__.__name__, args) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo. + + .. versionadded:: 2.7.0 + """ +# vim:ts=4:sw=4:et diff --git a/myenv/lib/python3.12/site-packages/dateutil/parser/isoparser.py b/myenv/lib/python3.12/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 0000000..7060087 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. 
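+
+A taste of the accepted forms (illustrative doctests):
+
+    >>> from dateutil.parser import isoparse
+    >>> isoparse("2018-04-29T17:45:25Z")
+    datetime.datetime(2018, 4, 29, 17, 45, 25, tzinfo=tzutc())
+    >>> isoparse("2021-W01-1")
+    datetime.datetime(2021, 1, 4, 0, 0)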
+ +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. + """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. 
versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {!r}'.format(datestr.decode('ascii'))) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
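+            # e.g. b"2021-W27-3" (equivalently b"2021W273") resolves to
+            # the Wednesday of ISO week 27, i.e. 2021-07-07.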
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. + + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len_str < 2: + raise ValueError('ISO time too short') + + has_sep = False + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP: + has_sep = True + pos += 1 + elif comp == 2 and has_sep: + if timestr[pos:pos+1] != self._TIME_SEP: + raise ValueError('Inconsistent use of colon separator') + pos += 1 + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP 
else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/myenv/lib/python3.12/site-packages/dateutil/relativedelta.py b/myenv/lib/python3.12/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..cd323a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. 
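+
+ A minimal illustration of the distinction (dates chosen arbitrarily):
+
+ >>> from datetime import datetime
+ >>> from dateutil.relativedelta import relativedelta
+ >>> dt = datetime(2003, 9, 17)
+ >>> dt + relativedelta(month=1) # absolute: replaces the month
+ datetime.datetime(2003, 1, 17, 0, 0)
+ >>> dt + relativedelta(months=1) # relative: adds one month
+ datetime.datetime(2003, 10, 17, 0, 0)
+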
For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). + + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and 
int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
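+
+ A further illustrative case (fractional hours cascade into minutes):
+
+ >>> relativedelta(hours=1.5).normalized()
+ relativedelta(hours=+1, minutes=+30)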
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/myenv/lib/python3.12/site-packages/dateutil/rrule.py b/myenv/lib/python3.12/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..571a0d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/rrule.py @@ -0,0 +1,1737 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import calendar +import datetime +import heapq +import itertools +import re +import sys +from functools import wraps +# For warning about deprecation of until and count +from warnings import warn + +from six import advance_iterator, integer_types + +from six.moves import _thread, range + +from ._common import weekday as weekdaybase + +try: + from math import gcd +except ImportError: + from fractions import gcd + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
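+# Illustrative reading of the masks defined below: M366MASK[0:31] is all
+# 1s (January), M366MASK[31:60] all 2s (leap-year February), and the
+# final seven entries wrap back to 1 so a week straddling December 31
+# can be scanned without bounds checks.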
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + @wraps(f) + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. 
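+ # (hence the iterator length is exposed through the explicit count()
+ # method below rather than through __len__().)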
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + through the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. 
Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
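+ # (Illustrative: byweekday=0 and byweekday=MO are each wrapped as a
+ # one-element tuple here, while sequences like (MO, TU(+2)) pass
+ # through unchanged.)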
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
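+
+ A sketch of the expected output (given the formatting applied below):
+
+ >>> from datetime import datetime
+ >>> from dateutil.rrule import rrule, DAILY
+ >>> print(rrule(DAILY, count=3, dtstart=datetime(2020, 1, 1)))
+ DTSTART:20200101T000000
+ RRULE:FREQ=DAILY;COUNT=3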
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
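+ # (Illustrative: at 00:00:05 with interval=1 this adds 86394,
+ # leaving the second-of-day counter at 86399, i.e. one step before
+ # midnight, so the divmod cascade below rolls into the next day.)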
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
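+
+ As an illustrative case: with ``interval=4``, ``base=24`` and
+ ``byxxx={1, 5, 9, 13, 17, 21}``, a current value of 21 yields
+ ``(1, 1)``: one day boundary is crossed and the next valid hour is 1.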
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
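+
+ A minimal illustrative session (dates arbitrary):
+
+ >>> from datetime import datetime
+ >>> from dateutil.rrule import rrule, rruleset, DAILY
+ >>> rset = rruleset()
+ >>> rset.rrule(rrule(DAILY, count=3, dtstart=datetime(2020, 1, 1)))
+ >>> rset.exdate(datetime(2020, 1, 2))
+ >>> list(rset)
+ [datetime.datetime(2020, 1, 1, 0, 0), datetime.datetime(2020, 1, 3, 0, 0)]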
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz
+                    tzlookup = tz.gettz
+                elif callable(tzids):
+                    tzlookup = tzids
+                else:
+                    tzlookup = getattr(tzids, 'get', None)
+                    if tzlookup is None:
+                        msg = ('tzids must be a callable, mapping, or None, '
+                               'not %s' % tzids)
+                        raise ValueError(msg)
+
+                TZID = tzlookup(tzkey)
+                continue
+
+            # RFC 5545 3.8.2.4: The VALUE parameter is optional, but may be found
+            # only once.
+            if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}:
+                raise ValueError("unsupported parm: " + parm)
+            else:
+                if value_found:
+                    msg = ("Duplicate value parameter found in: " + parm)
+                    raise ValueError(msg)
+                value_found = True
+
+        for datestr in date_value.split(','):
+            date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos)
+            if TZID is not None:
+                if date.tzinfo is None:
+                    date = date.replace(tzinfo=TZID)
+                else:
+                    raise ValueError('DTSTART/EXDATE specifies multiple timezones')
+            datevals.append(date)
+
+        return datevals
+
+    def _parse_rfc(self, s,
+                   dtstart=None,
+                   cache=False,
+                   unfold=False,
+                   forceset=False,
+                   compatible=False,
+                   ignoretz=False,
+                   tzids=None,
+                   tzinfos=None):
+        global parser
+        if compatible:
+            forceset = True
+            unfold = True
+
+        TZID_NAMES = dict(map(
+            lambda x: (x.upper(), x),
+            re.findall('TZID=(?P<tzid>[^:]+):', s)
+        ))
+        s = s.upper()
+        if not s.strip():
+            raise ValueError("empty string")
+        if unfold:
+            lines = s.splitlines()
+            i = 0
+            while i < len(lines):
+                line = lines[i].rstrip()
+                if not line:
+                    del lines[i]
+                elif i > 0 and line[0] == " ":
+                    lines[i-1] += line[1:]
+                    del lines[i]
+                else:
+                    i += 1
+        else:
+            lines = s.split()
+        if (not forceset and len(lines) == 1 and (s.find(':') == -1 or
+                                                  s.startswith('RRULE:'))):
+            return self._parse_rfc_rrule(lines[0], cache=cache,
+                                         dtstart=dtstart, ignoretz=ignoretz,
+                                         tzinfos=tzinfos)
+        else:
+            rrulevals = []
+            rdatevals = []
+            exrulevals = []
+            exdatevals = []
+            for line in lines:
+                if not line:
+                    continue
+                if line.find(':') == -1:
+                    name = "RRULE"
+                    value = line
+                else:
+                    name, value = line.split(':', 1)
+                parms = name.split(';')
+                if not parms:
+                    raise ValueError("empty property name")
+                name = parms[0]
+                parms = parms[1:]
+                if name == "RRULE":
+                    for parm in parms:
+                        raise ValueError("unsupported RRULE parm: "+parm)
+                    rrulevals.append(value)
+                elif name == "RDATE":
+                    for parm in parms:
+                        if parm != "VALUE=DATE-TIME":
+                            raise ValueError("unsupported RDATE parm: "+parm)
+                    rdatevals.append(value)
+                elif name == "EXRULE":
+                    for parm in parms:
+                        raise ValueError("unsupported EXRULE parm: "+parm)
+                    exrulevals.append(value)
+                elif name == "EXDATE":
+                    exdatevals.extend(
+                        self._parse_date_value(value, parms,
+                                               TZID_NAMES, ignoretz,
+                                               tzids, tzinfos)
+                    )
+                elif name == "DTSTART":
+                    dtvals = self._parse_date_value(value, parms, TZID_NAMES,
+                                                    ignoretz, tzids, tzinfos)
+                    if len(dtvals) != 1:
+                        raise ValueError("Multiple DTSTART values specified:" +
+                                         value)
+                    dtstart = dtvals[0]
+                else:
+                    raise ValueError("unsupported property: "+name)
+            if (forceset or len(rrulevals) > 1 or rdatevals
+                    or exrulevals or exdatevals):
+                if not parser and (rdatevals or exdatevals):
+                    from dateutil import parser
+                rset = rruleset(cache=cache)
+                for value in rrulevals:
+                    rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
+                                                     ignoretz=ignoretz,
+                                                     tzinfos=tzinfos))
+                for value in rdatevals:
+                    for datestr in value.split(','):
+                        rset.rdate(parser.parse(datestr,
+                                                ignoretz=ignoretz,
+                                                tzinfos=tzinfos))
+                for value in exrulevals:
+                    rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
+                                                      ignoretz=ignoretz,
+                                                      tzinfos=tzinfos))
+                for value in exdatevals:
+                    rset.exdate(value)
+                if
compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__init__.py b/myenv/lib/python3.12/site-packages/dateutil/tz/__init__.py new file mode 100644 index 0000000..af1352c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4d59fe1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc new file mode 100644 index 0000000..9d619a8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc new file mode 100644 index 0000000..83b33c8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc new file mode 100644 index 0000000..925a12d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc new file mode 100644 index 0000000..cbc47b4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/_common.py b/myenv/lib/python3.12/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..e6ac118 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. 
It used to return bytes, but was changed
+    to unicode strings.
+    """
+    if PY2:
+        @wraps(namefunc)
+        def adjust_encoding(*args, **kwargs):
+            name = namefunc(*args, **kwargs)
+            if name is not None:
+                name = name.encode()
+
+            return name
+
+        return adjust_encoding
+    else:
+        return namefunc
+
+
+# The following is adapted from Alexander Belopolsky's tz library
+# https://github.com/abalkin/tz
+if hasattr(datetime, 'fold'):
+    # This is the Python 3.6+ situation, where ``fold`` is a native
+    # attribute of ``datetime`` (PEP 495).
+    def enfold(dt, fold=1):
+        """
+        Provides a unified interface for assigning the ``fold`` attribute to
+        datetimes both before and after the implementation of PEP-495.
+
+        :param fold:
+            The value for the ``fold`` attribute in the returned datetime. This
+            should be either 0 or 1.
+
+        :return:
+            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
+            ``fold`` for all versions of Python. In versions prior to
+            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
+            subclass of :py:class:`datetime.datetime` with the ``fold``
+            attribute added, if ``fold`` is 1.
+
+        .. versionadded:: 2.6.0
+        """
+        return dt.replace(fold=fold)
+
+else:
+    class _DatetimeWithFold(datetime):
+        """
+        This is a class designed to provide a PEP 495-compliant interface for
+        Python versions before 3.6. It is used only for dates in a fold, so
+        the ``fold`` attribute is fixed at ``1``.
+
+        .. versionadded:: 2.6.0
+        """
+        __slots__ = ()
+
+        def replace(self, *args, **kwargs):
+            """
+            Return a datetime with the same attributes, except for those
+            attributes given new values by whichever keyword arguments are
+            specified. Note that tzinfo=None can be specified to create a naive
+            datetime from an aware datetime with no conversion of date and time
+            data.
+
+            This is reimplemented in ``_DatetimeWithFold`` because pypy3 will
+            return a ``datetime.datetime`` even if ``fold`` is unchanged.
+            """
+            argnames = (
+                'year', 'month', 'day', 'hour', 'minute', 'second',
+                'microsecond', 'tzinfo'
+            )
+
+            for arg, argname in zip(args, argnames):
+                if argname in kwargs:
+                    raise TypeError('Duplicate argument: {}'.format(argname))
+
+                kwargs[argname] = arg
+
+            for argname in argnames:
+                if argname not in kwargs:
+                    kwargs[argname] = getattr(self, argname)
+
+            dt_class = self.__class__ if kwargs.get('fold', 1) else datetime
+
+            return dt_class(**kwargs)
+
+        @property
+        def fold(self):
+            return 1
+
+    def enfold(dt, fold=1):
+        """
+        Provides a unified interface for assigning the ``fold`` attribute to
+        datetimes both before and after the implementation of PEP-495.
+
+        :param fold:
+            The value for the ``fold`` attribute in the returned datetime. This
+            should be either 0 or 1.
+
+        :return:
+            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
+            ``fold`` for all versions of Python. In versions prior to
+            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
+            subclass of :py:class:`datetime.datetime` with the ``fold``
+            attribute added, if ``fold`` is 1.
+
+        .. versionadded:: 2.6.0
+        """
+        if getattr(dt, 'fold', 0) == fold:
+            return dt
+
+        args = dt.timetuple()[:6]
+        args += (dt.microsecond, dt.tzinfo)
+
+        if fold:
+            return _DatetimeWithFold(*args)
+        else:
+            return datetime(*args)
+
+
+def _validate_fromutc_inputs(f):
+    """
+    The CPython version of ``fromutc`` checks that the input is a ``datetime``
+    object and that ``self`` is attached as its ``tzinfo``.
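+
+    A minimal illustration (editor's sketch; ``UTC`` is the module's
+    singleton zone, whose ``fromutc`` is wrapped by this decorator):
+
+    .. doctest::
+
+        >>> from datetime import datetime
+        >>> from dateutil.tz import UTC
+        >>> UTC.fromutc(datetime(2020, 1, 1))
+        Traceback (most recent call last):
+        ...
+        ValueError: dt.tzinfo is not self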
+ """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/_factories.py b/myenv/lib/python3.12/site-packages/dateutil/tz/_factories.py new file mode 100644 index 0000000..f8a6589 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. 
See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/tz.py b/myenv/lib/python3.12/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..6175914 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime(1970, 1, 1, 0, 0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. 
versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. 
If omitted
+        and ``fileobj`` is a file stream, this parameter will be set either to
+        ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``.
+
+    See `Sources for Time Zone and Daylight Saving Time Data
+    <https://data.iana.org/time-zones/tz-link.html>`_ for more information.
+    Time zone files can be compiled from the `IANA Time Zone database files
+    <https://www.iana.org/time-zones>`_ with the `zic time zone compiler
+    <https://www.freebsd.org/cgi/man.cgi?query=zic&sektion=8>`_
+
+    .. note::
+
+        Only construct a ``tzfile`` directly if you have a specific timezone
+        file on disk that you want to read into a Python ``tzinfo`` object.
+        If you want to get a ``tzfile`` representing a specific IANA zone
+        (e.g. ``'America/New_York'``), you should call
+        :func:`dateutil.tz.gettz` with the zone identifier.
+
+
+    **Examples:**
+
+    Using the US Eastern time zone as an example, we can see that a ``tzfile``
+    provides time zone information for the standard Daylight Saving offsets:
+
+    .. testsetup:: tzfile
+
+        from dateutil.tz import gettz
+        from datetime import datetime
+
+    .. doctest:: tzfile
+
+        >>> NYC = gettz('America/New_York')
+        >>> NYC
+        tzfile('/usr/share/zoneinfo/America/New_York')
+
+        >>> print(datetime(2016, 1, 3, tzinfo=NYC))     # EST
+        2016-01-03 00:00:00-05:00
+
+        >>> print(datetime(2016, 7, 7, tzinfo=NYC))     # EDT
+        2016-07-07 00:00:00-04:00
+
+
+    The ``tzfile`` structure contains a full history of the time zone,
+    so historical dates will also have the right offsets. For example, before
+    the adoption of the UTC standards, New York used local solar mean time:
+
+    .. doctest:: tzfile
+
+        >>> print(datetime(1901, 4, 12, tzinfo=NYC))    # LMT
+        1901-04-12 00:00:00-04:56
+
+    And during World War II, New York was on "Eastern War Time", which was a
+    state of permanent daylight saving time:
+
+    .. doctest:: tzfile
+
+        >>> print(datetime(1944, 2, 7, tzinfo=NYC))    # EWT
+        1944-02-07 00:00:00-04:00
+
+    """
+
+    def __init__(self, fileobj, filename=None):
+        super(tzfile, self).__init__()
+
+        file_opened_here = False
+        if isinstance(fileobj, string_types):
+            self._filename = fileobj
+            fileobj = open(fileobj, 'rb')
+            file_opened_here = True
+        elif filename is not None:
+            self._filename = filename
+        elif hasattr(fileobj, "name"):
+            self._filename = fileobj.name
+        else:
+            self._filename = repr(fileobj)
+
+        if fileobj is not None:
+            if not file_opened_here:
+                fileobj = _nullcontext(fileobj)
+
+            with fileobj as file_stream:
+                tzobj = self._read_tzfile(file_stream)
+
+            self._set_tzdata(tzobj)
+
+    def _set_tzdata(self, tzobj):
+        """ Set the time zone data of this object from a _tzfile object """
+        # Copy the relevant attributes over as private attributes
+        for attr in _tzfile.attrs:
+            setattr(self, '_' + attr, getattr(tzobj, attr))
+
+    def _read_tzfile(self, fileobj):
+        out = _tzfile()
+
+        # From tzfile(5):
+        #
+        #   The time zone information files used by tzset(3)
+        #   begin with the magic characters "TZif" to identify
+        #   them as time zone information files, followed by
+        #   sixteen bytes reserved for future use, followed by
+        #   six four-byte values of type long, written in a
+        #   ``standard'' byte order (the high-order byte
+        #   of the value is written first).
+        if fileobj.read(4).decode() != "TZif":
+            raise ValueError("magic not found")
+
+        fileobj.read(16)
+
+        (
+            # The number of UTC/local indicators stored in the file.
+            ttisgmtcnt,
+
+            # The number of standard/wall indicators stored in the file.
+            ttisstdcnt,
+
+            # The number of leap seconds for which data is
+            # stored in the file.
+            leapcnt,
+
+            # The number of "transition times" for which data
+            # is stored in the file.
+ timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. 
+ + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. 
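+        # Editor's note: e.g. with trans_list == [t0, t1, t2] and
+        # t1 <= timestamp < t2, bisect_right returns 2 and the final
+        # ``idx - 1`` yields 1, the transition most recently in effect.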
+ trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. 
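+        # Editor's note: ``dstoffset`` was precomputed per transition type in
+        # ``_read_tzfile``, so it is returned directly here rather than being
+        # recomputed from neighbouring transitions.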
+        return tti.dstoffset
+
+    @tzname_in_python2
+    def tzname(self, dt):
+        if not self._ttinfo_std or dt is None:
+            return None
+        return self._find_ttinfo(dt).abbr
+
+    def __eq__(self, other):
+        if not isinstance(other, tzfile):
+            return NotImplemented
+        return (self._trans_list == other._trans_list and
+                self._trans_idx == other._trans_idx and
+                self._ttinfo_list == other._ttinfo_list)
+
+    __hash__ = None
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
+
+    def __reduce__(self):
+        return self.__reduce_ex__(None)
+
+    def __reduce_ex__(self, protocol):
+        return (self.__class__, (None, self._filename), self.__dict__)
+
+
+class tzrange(tzrangebase):
+    """
+    The ``tzrange`` object is a time zone specified by a set of offsets and
+    abbreviations, equivalent to the way the ``TZ`` variable can be specified
+    in POSIX-like systems, but using Python delta objects to specify DST
+    start, end and offsets.
+
+    :param stdabbr:
+        The abbreviation for standard time (e.g. ``'EST'``).
+
+    :param stdoffset:
+        An integer or :class:`datetime.timedelta` object or equivalent
+        specifying the base offset from UTC.
+
+        If unspecified, +00:00 is used.
+
+    :param dstabbr:
+        The abbreviation for DST / "Summer" time (e.g. ``'EDT'``).
+
+        If specified, with no other DST information, DST is assumed to occur
+        and the default behavior of ``dstoffset``, ``start`` and ``end`` is
+        used. If unspecified and no other DST information is specified, it
+        is assumed that this zone has no DST.
+
+        If this is unspecified and other DST information *is* specified,
+        DST occurs in the zone but the time zone abbreviation is left
+        unchanged.
+
+    :param dstoffset:
+        An integer or :class:`datetime.timedelta` object or equivalent
+        specifying the UTC offset during DST. If unspecified and any other DST
+        information is specified, it is assumed to be the STD offset +1 hour.
+
+    :param start:
+        A :class:`relativedelta.relativedelta` object or equivalent specifying
+        the time and time of year that daylight savings time starts. To
+        specify, for example, that DST starts at 2AM on the 2nd Sunday in
+        March, pass:
+
+            ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))``
+
+        If unspecified and any other DST information is specified, the default
+        value is 2 AM on the first Sunday in April.
+
+    :param end:
+        A :class:`relativedelta.relativedelta` object or equivalent
+        representing the time and time of year that daylight savings time
+        ends, with the same specification method as in ``start``. One note is
+        that this should point to the first time in the *standard* zone, so if
+        a transition occurs at 2AM in the DST zone and the clocks are set back
+        1 hour to 1AM, set the ``hours`` parameter to +1.
+
+
+    **Examples:**
+
+    .. testsetup:: tzrange
+
+        from dateutil.tz import tzrange, tzstr
+
+    .. doctest:: tzrange
+
+        >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT")
+        True
+
+        >>> from dateutil.relativedelta import *
+        >>> range1 = tzrange("EST", -18000, "EDT")
+        >>> range2 = tzrange("EST", -18000, "EDT", -14400,
+        ...                  relativedelta(hours=+2, month=4, day=1,
+        ...                                weekday=SU(+1)),
+        ...                  relativedelta(hours=+1, month=10, day=31,
+        ...                                weekday=SU(-1)))
+        >>> tzstr('EST5EDT') == range1 == range2
+        True
+
+    """
+    def __init__(self, stdabbr, stdoffset=None,
+                 dstabbr=None, dstoffset=None,
+                 start=None, end=None):
+
+        global relativedelta
+        from dateutil import relativedelta
+
+        self._std_abbr = stdabbr
+        self._dst_abbr = dstabbr
+
+        try:
+            stdoffset = stdoffset.total_seconds()
+        except (TypeError, AttributeError):
+            pass
+
+        try:
+            dstoffset = dstoffset.total_seconds()
+        except (TypeError, AttributeError):
+            pass
+
+        if stdoffset is not None:
+            self._std_offset = datetime.timedelta(seconds=stdoffset)
+        else:
+            self._std_offset = ZERO
+
+        if dstoffset is not None:
+            self._dst_offset = datetime.timedelta(seconds=dstoffset)
+        elif dstabbr and stdoffset is not None:
+            self._dst_offset = self._std_offset + datetime.timedelta(hours=+1)
+        else:
+            self._dst_offset = ZERO
+
+        if dstabbr and start is None:
+            self._start_delta = relativedelta.relativedelta(
+                hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
+        else:
+            self._start_delta = start
+
+        if dstabbr and end is None:
+            self._end_delta = relativedelta.relativedelta(
+                hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
+        else:
+            self._end_delta = end
+
+        self._dst_base_offset_ = self._dst_offset - self._std_offset
+        self.hasdst = bool(self._start_delta)
+
+    def transitions(self, year):
+        """
+        For a given year, get the DST on and off transition times, expressed
+        always on the standard time side. For zones with no transitions, this
+        function returns ``None``.
+
+        :param year:
+            The year whose transitions you would like to query.
+
+        :return:
+            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
+            ``(dston, dstoff)`` for zones with an annual DST transition, or
+            ``None`` for fixed offset zones.
+        """
+        if not self.hasdst:
+            return None
+
+        base_year = datetime.datetime(year, 1, 1)
+
+        start = base_year + self._start_delta
+        end = base_year + self._end_delta
+
+        return (start, end)
+
+    def __eq__(self, other):
+        if not isinstance(other, tzrange):
+            return NotImplemented
+
+        return (self._std_abbr == other._std_abbr and
+                self._dst_abbr == other._dst_abbr and
+                self._std_offset == other._std_offset and
+                self._dst_offset == other._dst_offset and
+                self._start_delta == other._start_delta and
+                self._end_delta == other._end_delta)
+
+    @property
+    def _dst_base_offset(self):
+        return self._dst_base_offset_
+
+
+@six.add_metaclass(_TzStrFactory)
+class tzstr(tzrange):
+    """
+    ``tzstr`` objects are time zone objects specified by a time-zone string as
+    it would be passed to a ``TZ`` variable on POSIX-style systems (see
+    the `GNU C Library: TZ Variable`_ for more details).
+
+    There is one notable exception, which is that POSIX-style time zones use an
+    inverted offset format, so normally ``GMT+3`` would be parsed as an offset
+    3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an
+    offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX
+    behavior, pass a ``True`` value to ``posix_offset``.
+
+    The :class:`tzrange` object provides the same functionality, but is
+    specified using :class:`relativedelta.relativedelta` objects rather than
+    strings.
+
+    :param s:
+        A time zone string in ``TZ`` variable format. This can be a
+        :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x:
+        :class:`unicode`) or a stream emitting unicode characters
+        (e.g. :class:`StringIO`).
+
+    :param posix_offset:
+        Optional. If set to ``True``, interpret strings such as ``GMT+3`` or
+        ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the
+        POSIX standard.
+
+    .. caution::
+
+        Prior to version 2.7.0, this function also supported time zones
+        in the format:
+
+            * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600``
+            * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600``
+
+        This format is non-standard and has been deprecated; this function
+        will raise a :class:`DeprecatedTzFormatWarning` until
+        support is removed in a future version.
+
+    .. _`GNU C Library: TZ Variable`:
+        https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html
+    """
+    def __init__(self, s, posix_offset=False):
+        global parser
+        from dateutil.parser import _parser as parser
+
+        self._s = s
+
+        res = parser._parsetz(s)
+        if res is None or res.any_unused_tokens:
+            raise ValueError("unknown string format")
+
+        # Here we break the compatibility with the TZ variable handling.
+        # GMT-3 actually *means* the timezone -3.
+        if res.stdabbr in ("GMT", "UTC") and not posix_offset:
+            res.stdoffset *= -1
+
+        # We must initialize it first, since _delta() needs
+        # _std_offset and _dst_offset set. Use False in start/end
+        # to avoid building it two times.
+        tzrange.__init__(self, res.stdabbr, res.stdoffset,
+                         res.dstabbr, res.dstoffset,
+                         start=False, end=False)
+
+        if not res.dstabbr:
+            self._start_delta = None
+            self._end_delta = None
+        else:
+            self._start_delta = self._delta(res.start)
+            if self._start_delta:
+                self._end_delta = self._delta(res.end, isend=1)
+
+        self.hasdst = bool(self._start_delta)
+
+    def _delta(self, x, isend=0):
+        from dateutil import relativedelta
+        kwargs = {}
+        if x.month is not None:
+            kwargs["month"] = x.month
+            if x.weekday is not None:
+                kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week)
+                if x.week > 0:
+                    kwargs["day"] = 1
+                else:
+                    kwargs["day"] = 31
+            elif x.day:
+                kwargs["day"] = x.day
+        elif x.yday is not None:
+            kwargs["yearday"] = x.yday
+        elif x.jyday is not None:
+            kwargs["nlyearday"] = x.jyday
+
+        if not kwargs:
+            # Default is to start on first sunday of april, and end
+            # on last sunday of october.
+            if not isend:
+                kwargs["month"] = 4
+                kwargs["day"] = 1
+                kwargs["weekday"] = relativedelta.SU(+1)
+            else:
+                kwargs["month"] = 10
+                kwargs["day"] = 31
+                kwargs["weekday"] = relativedelta.SU(-1)
+        if x.time is not None:
+            kwargs["seconds"] = x.time
+        else:
+            # Default is 2AM.
+            kwargs["seconds"] = 7200
+        if isend:
+            # Convert to standard time, to follow the documented way
+            # of working with the extra hour. See the documentation
+            # of the tzinfo class.
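+            # Editor's worked example: with the defaults above, the end
+            # transition is 7200s (2AM wall/DST time); for a one-hour DST
+            # delta this becomes 7200 - 3600 = 3600s, i.e. 1AM standard time.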
+            delta = self._dst_offset - self._std_offset
+            kwargs["seconds"] -= delta.seconds + delta.days * 86400
+        return relativedelta.relativedelta(**kwargs)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, repr(self._s))
+
+
+class _tzicalvtzcomp(object):
+    def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
+                 tzname=None, rrule=None):
+        self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
+        self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
+        self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom
+        self.isdst = isdst
+        self.tzname = tzname
+        self.rrule = rrule
+
+
+class _tzicalvtz(_tzinfo):
+    def __init__(self, tzid, comps=[]):
+        super(_tzicalvtz, self).__init__()
+
+        self._tzid = tzid
+        self._comps = comps
+        self._cachedate = []
+        self._cachecomp = []
+        self._cache_lock = _thread.allocate_lock()
+
+    def _find_comp(self, dt):
+        if len(self._comps) == 1:
+            return self._comps[0]
+
+        dt = dt.replace(tzinfo=None)
+
+        try:
+            with self._cache_lock:
+                return self._cachecomp[self._cachedate.index(
+                    (dt, self._fold(dt)))]
+        except ValueError:
+            pass
+
+        lastcompdt = None
+        lastcomp = None
+
+        for comp in self._comps:
+            compdt = self._find_compdt(comp, dt)
+
+            if compdt and (not lastcompdt or lastcompdt < compdt):
+                lastcompdt = compdt
+                lastcomp = comp
+
+        if not lastcomp:
+            # RFC says nothing about what to do when a given
+            # time is before the first onset date. We'll look for the
+            # first standard component, or the first component, if
+            # none is found.
+            for comp in self._comps:
+                if not comp.isdst:
+                    lastcomp = comp
+                    break
+            else:
+                lastcomp = self._comps[0]
+
+        with self._cache_lock:
+            self._cachedate.insert(0, (dt, self._fold(dt)))
+            self._cachecomp.insert(0, lastcomp)
+
+            if len(self._cachedate) > 10:
+                self._cachedate.pop()
+                self._cachecomp.pop()
+
+        return lastcomp
+
+    def _find_compdt(self, comp, dt):
+        if comp.tzoffsetdiff < ZERO and self._fold(dt):
+            dt -= comp.tzoffsetdiff
+
+        compdt = comp.rrule.before(dt, inc=True)
+
+        return compdt
+
+    def utcoffset(self, dt):
+        if dt is None:
+            return None
+
+        return self._find_comp(dt).tzoffsetto
+
+    def dst(self, dt):
+        comp = self._find_comp(dt)
+        if comp.isdst:
+            return comp.tzoffsetdiff
+        else:
+            return ZERO
+
+    @tzname_in_python2
+    def tzname(self, dt):
+        return self._find_comp(dt).tzname
+
+    def __repr__(self):
+        return "<tzicalvtz %s>" % repr(self._tzid)
+
+    __reduce__ = object.__reduce__
+
+
+class tzical(object):
+    """
+    This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure
+    as set out in `RFC 5545`_ Section 3.6.5 into one or more `tzinfo` objects.
+
+    :param `fileobj`:
+        A file or stream in iCalendar format, which should be UTF-8 encoded
+        with CRLF endings.
+
+    .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545
+    """
+    def __init__(self, fileobj):
+        global rrule
+        from dateutil import rrule
+
+        if isinstance(fileobj, string_types):
+            self._s = fileobj
+            # ical should be encoded in UTF-8 with CRLF
+            fileobj = open(fileobj, 'r')
+        else:
+            self._s = getattr(fileobj, 'name', repr(fileobj))
+            fileobj = _nullcontext(fileobj)
+
+        self._vtz = {}
+
+        with fileobj as fobj:
+            self._parse_rfc(fobj.read())
+
+    def keys(self):
+        """
+        Retrieves the available time zones as a list.
+        """
+        return list(self._vtz.keys())
+
+    def get(self, tzid=None):
+        """
+        Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``.
+
+        :param tzid:
+            If there is exactly one time zone available, omitting ``tzid``
+            or passing :py:const:`None` value returns it. Otherwise a valid
+            key (which can be retrieved from :func:`keys`) is required.
+
+        :raises ValueError:
+            Raised if ``tzid`` is not specified and there is more or less
+            than one zone defined.
+
+        :returns:
+            Returns either a :py:class:`datetime.tzinfo` object representing
+            the relevant time zone or :py:const:`None` if the ``tzid`` was
+            not found.
+        """
+        if tzid is None:
+            if len(self._vtz) == 0:
+                raise ValueError("no timezones defined")
+            elif len(self._vtz) > 1:
+                raise ValueError("more than one timezone available")
+            tzid = next(iter(self._vtz))
+
+        return self._vtz.get(tzid)
+
+    def _parse_offset(self, s):
+        s = s.strip()
+        if not s:
+            raise ValueError("empty offset")
+        if s[0] in ('+', '-'):
+            signal = (-1, +1)[s[0] == '+']
+            s = s[1:]
+        else:
+            signal = +1
+        if len(s) == 4:
+            return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal
+        elif len(s) == 6:
+            return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal
+        else:
+            raise ValueError("invalid offset: " + s)
+
+    def _parse_rfc(self, s):
+        lines = s.splitlines()
+        if not lines:
+            raise ValueError("empty string")
+
+        # Unfold
+        i = 0
+        while i < len(lines):
+            line = lines[i].rstrip()
+            if not line:
+                del lines[i]
+            elif i > 0 and line[0] == " ":
+                lines[i-1] += line[1:]
+                del lines[i]
+            else:
+                i += 1
+
+        tzid = None
+        comps = []
+        invtz = False
+        comptype = None
+        for line in lines:
+            if not line:
+                continue
+            name, value = line.split(':', 1)
+            parms = name.split(';')
+            if not parms:
+                raise ValueError("empty property name")
+            name = parms[0].upper()
+            parms = parms[1:]
+            if invtz:
+                if name == "BEGIN":
+                    if value in ("STANDARD", "DAYLIGHT"):
+                        # Process component
+                        pass
+                    else:
+                        raise ValueError("unknown component: "+value)
+                    comptype = value
+                    founddtstart = False
+                    tzoffsetfrom = None
+                    tzoffsetto = None
+                    rrulelines = []
+                    tzname = None
+                elif name == "END":
+                    if value == "VTIMEZONE":
+                        if comptype:
+                            raise ValueError("component not closed: "+comptype)
+                        if not tzid:
+                            raise ValueError("mandatory TZID not found")
+                        if not comps:
+                            raise ValueError(
+                                "at least one component is needed")
+                        # Process vtimezone
+                        self._vtz[tzid] = _tzicalvtz(tzid, comps)
+                        invtz = False
+                    elif value == comptype:
+                        if not founddtstart:
+                            raise ValueError("mandatory DTSTART not found")
+                        if tzoffsetfrom is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETFROM not found")
+                        if tzoffsetto is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETTO not found")
+                        # Process component
+                        rr = None
+                        if rrulelines:
+                            rr = rrule.rrulestr("\n".join(rrulelines),
+                                                compatible=True,
+                                                ignoretz=True,
+                                                cache=True)
+                        comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
+                                              (comptype == "DAYLIGHT"),
+                                              tzname, rr)
+                        comps.append(comp)
+                        comptype = None
+                    else:
+                        raise ValueError("invalid component end: "+value)
+                elif comptype:
+                    if name == "DTSTART":
+                        # DTSTART in VTIMEZONE takes a subset of valid RRULE
+                        # values under RFC 5545.
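+                        # (Only the default VALUE=DATE-TIME form is accepted;
+                        # any other DTSTART parameter is rejected below.)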
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name in ("", ":"): + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) 
+ + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. + + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. 
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/myenv/lib/python3.12/site-packages/dateutil/tz/win.py b/myenv/lib/python3.12/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..cde07ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. 
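+
+        For example (illustrative values; the actual datetimes depend on the
+        transition rules stored in the registry for the zone in question)::
+
+            >>> tzwin('Eastern Standard Time').transitions(2020)  # doctest: +SKIP
+            (datetime.datetime(2020, 3, 8, 2, 0), datetime.datetime(2020, 11, 1, 1, 0))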
+
+        :param year:
+            The year whose transitions you would like to query.
+
+        :return:
+            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
+            ``(dston, dstoff)`` for zones with an annual DST transition, or
+            ``None`` for fixed offset zones.
+        """
+
+        if not self.hasdst:
+            return None
+
+        dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
+                               self._dsthour, self._dstminute,
+                               self._dstweeknumber)
+
+        dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
+                                self._stdhour, self._stdminute,
+                                self._stdweeknumber)
+
+        # Ambiguous dates default to the STD side
+        dstoff -= self._dst_base_offset
+
+        return dston, dstoff
+
+    def _get_hasdst(self):
+        return self._dstmonth != 0
+
+    @property
+    def _dst_base_offset(self):
+        return self._dst_base_offset_
+
+
+class tzwin(tzwinbase):
+    """
+    Time zone object created from the zone info in the Windows registry
+
+    These are similar to :py:class:`dateutil.tz.tzrange` objects in that
+    the time zone data is provided in the format of a single offset rule
+    for either 0 or 2 time zone transitions per year.
+
+    :param name:
+        The name of a Windows time zone key, e.g. "Eastern Standard Time".
+        The full list of keys can be retrieved with :func:`tzwin.list`.
+    """
+
+    def __init__(self, name):
+        self._name = name
+
+        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
+            tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
+            with winreg.OpenKey(handle, tzkeyname) as tzkey:
+                keydict = valuestodict(tzkey)
+
+        self._std_abbr = keydict["Std"]
+        self._dst_abbr = keydict["Dlt"]
+
+        self._display = keydict["Display"]
+
+        # See http://ww1.jsiinc.com/SUBA/tip0300/rh0398.htm
+        tup = struct.unpack("=3l16h", keydict["TZI"])
+        stdoffset = -tup[0]-tup[1]          # Bias + StandardBias * -1
+        dstoffset = stdoffset-tup[2]        # + DaylightBias * -1
+        self._std_offset = datetime.timedelta(minutes=stdoffset)
+        self._dst_offset = datetime.timedelta(minutes=dstoffset)
+
+        # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
+        (self._stdmonth,
+         self._stddayofweek,   # Sunday = 0
+         self._stdweeknumber,  # Last = 5
+         self._stdhour,
+         self._stdminute) = tup[4:9]
+
+        (self._dstmonth,
+         self._dstdayofweek,   # Sunday = 0
+         self._dstweeknumber,  # Last = 5
+         self._dsthour,
+         self._dstminute) = tup[12:17]
+
+        self._dst_base_offset_ = self._dst_offset - self._std_offset
+        self.hasdst = self._get_hasdst()
+
+    def __repr__(self):
+        return "tzwin(%s)" % repr(self._name)
+
+    def __reduce__(self):
+        return (self.__class__, (self._name,))
+
+
+class tzwinlocal(tzwinbase):
+    """
+    Class representing the local time zone information in the Windows registry
+
+    While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time`
+    module) to retrieve time zone information, ``tzwinlocal`` retrieves the
+    rules directly from the Windows registry and creates an object like
+    :class:`dateutil.tz.tzwin`.
+
+    Because Windows does not have an equivalent of :func:`time.tzset`, on
+    Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the
+    time zone settings *at the time that the process was started*, meaning
+    changes to the machine's time zone settings during the run of a program
+    on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`.
+    Because ``tzwinlocal`` reads the registry directly, it is unaffected by
+    this issue.
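+
+    A rough usage sketch (Windows only; the zone name shown is illustrative)::
+
+        >>> from dateutil.tz.win import tzwinlocal
+        >>> str(tzwinlocal())  # doctest: +SKIP
+        "tzwinlocal('Eastern Standard Time')"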
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/myenv/lib/python3.12/site-packages/dateutil/tzwin.py b/myenv/lib/python3.12/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/myenv/lib/python3.12/site-packages/dateutil/utils.py b/myenv/lib/python3.12/site-packages/dateutil/utils.py new file mode 100644 index 0000000..dd2d245 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. + +.. 
versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may have a negligible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000..34f11ad --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. (Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..cb0c1ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc new file mode 100644 index 0000000..7831894 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000..1461f8c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000..684c658 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,75 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call, check_output +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. + + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + + _run_zic(zonedir, filepaths) + + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _run_zic(zonedir, filepaths): + """Calls the ``zic`` compiler in a compatible way to get a "fat" binary. + + Recent versions of ``zic`` default to ``-b slim``, while older versions + don't even have the ``-b`` option (but default to "fat" binaries). The + current version of dateutil does not support Version 2+ TZif files, which + causes problems when used in conjunction with "slim" binaries, so this + function is used to ensure that we always get a "fat" binary. 
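+
+    Roughly, with a modern ``zic`` this amounts to invoking::
+
+        zic -b fat -d <zonedir> <filepaths...>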
+ """ + + try: + help_text = check_output(["zic", "--help"]) + except OSError as e: + _print_on_nosuchfile(e) + raise + + if b"-b " in help_text: + bloat_args = ["-b", "fat"] + else: + bloat_args = [] + + check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/myenv/lib/python3.12/site-packages/distutils-precedence.pth b/myenv/lib/python3.12/site-packages/distutils-precedence.pth new file mode 100644 index 0000000..7f009fe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/LICENSE.txt b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..8f080ea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA new file mode 100644 index 0000000..cf12a82 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/METADATA @@ -0,0 +1,193 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.14.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.7 +License-File: LICENSE.txt +Requires-Dist: typing-extensions ; python_version < "3.8" + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. 
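+
+As a minimal sketch of that request/response flow (plain blocking sockets,
+no error handling; note that in current h11 releases you feed bytes in with
+``conn.receive_data(...)`` and then pull events out with ``conn.next_event()``):
+
+.. code-block:: python3
+
+    import socket
+
+    import h11
+
+    conn = h11.Connection(our_role=h11.CLIENT)
+    sock = socket.create_connection(("example.com", 80))
+
+    # conn.send(event) returns the bytes to put on the wire.
+    request = h11.Request(
+        method="GET",
+        target="/",
+        headers=[("Host", "example.com"), ("Connection", "close")],
+    )
+    sock.sendall(conn.send(request))
+    sock.sendall(conn.send(h11.EndOfMessage()))
+
+    # Pump bytes in, pull events out until the response body is done.
+    while True:
+        event = conn.next_event()
+        if event is h11.NEED_DATA:
+            conn.receive_data(sock.recv(4096))
+        elif isinstance(event, h11.Data):
+            print(event.data)
+        elif isinstance(event, (h11.EndOfMessage, h11.ConnectionClosed)):
+            break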
+ +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.7-3.10) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. 
(Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD new file mode 100644 index 0000000..a63f6cc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/RECORD @@ -0,0 +1,52 @@ +h11-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.14.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.14.0.dist-info/METADATA,sha256=B7pZ0m7WBXNs17vl6hUH9bJTL9s37DaGvY31w7jNxSg,8175 +h11-0.14.0.dist-info/RECORD,, +h11-0.14.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +h11-0.14.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-312.pyc,, +h11/__pycache__/_abnf.cpython-312.pyc,, +h11/__pycache__/_connection.cpython-312.pyc,, +h11/__pycache__/_events.cpython-312.pyc,, +h11/__pycache__/_headers.cpython-312.pyc,, +h11/__pycache__/_readers.cpython-312.pyc,, +h11/__pycache__/_receivebuffer.cpython-312.pyc,, +h11/__pycache__/_state.cpython-312.pyc,, +h11/__pycache__/_util.cpython-312.pyc,, +h11/__pycache__/_version.cpython-312.pyc,, +h11/__pycache__/_writers.cpython-312.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=eS2sorMD0zKLCFiB9lW9W9F_Nzny2tjHa4e6s1ujr1c,26539 +h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 +h11/_headers.py,sha256=RqB8cd8CN0blYPzcLe5qeCh-phv6D1U_CHj4hs67lgQ,10230 +h11/_readers.py,sha256=EbSed0jzwVUiD1nOPAeUcVE4Flf3wXkxfb8c06-OTBM,8383 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=k1VL6SDbaPkSrZ-49ewCXDpuiUS69_46YhbWjuV1qEY,13300 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=LVyTdiZRzIIEv79UyOgbM5iUrJUllEzlCWaJEYBY1zc,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-312.pyc,, +h11/tests/__pycache__/helpers.cpython-312.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc,, +h11/tests/__pycache__/test_connection.cpython-312.pyc,, +h11/tests/__pycache__/test_events.cpython-312.pyc,, +h11/tests/__pycache__/test_headers.cpython-312.pyc,, +h11/tests/__pycache__/test_helpers.cpython-312.pyc,, +h11/tests/__pycache__/test_io.cpython-312.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc,, +h11/tests/__pycache__/test_state.cpython-312.pyc,, +h11/tests/__pycache__/test_util.cpython-312.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 
+h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 +h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 +h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 +h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 +h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 +h11/tests/test_io.py,sha256=uCZVnjarkRBkudfC1ij-KSCQ71XWJhnkgkgWWkKgYPQ,16386 +h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 +h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 +h11/tests/test_util.py,sha256=VO5L4nSFe4pgtSwKuv6u_6l0H7UeizF5WKuHTWreg70,2970 diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11-0.14.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/myenv/lib/python3.12/site-packages/h11/__init__.py b/myenv/lib/python3.12/site-packages/h11/__init__.py new file mode 100644 index 0000000..989e92c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
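+#
+# The typical entry point is simply ``import h11``; the names re-exported
+# below (Connection, Request, Response, Data, EndOfMessage, ...) make up the
+# public API.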
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..28fb8f3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc new file mode 100644 index 0000000..4899fe3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc new file mode 100644 index 0000000..ea3ae4d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc new file mode 100644 index 0000000..8804ac5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc new file mode 100644 index 0000000..510f469 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc new file mode 100644 index 0000000..1d3ceed Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc new file mode 100644 index 0000000..61340ea Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc new file mode 100644 index 0000000..a272b35 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc new file mode 100644 index 0000000..a4cc9e8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..f70c18b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc new file mode 100644 index 0000000..fcad705 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/_abnf.py b/myenv/lib/python3.12/site-packages/h11/_abnf.py new file mode 100644 index 0000000..933587f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P<field_name>{field_name})" + r":" + r"{OWS}" + r"(?P<field_value>{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it consists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])" +request_line = ( + r"(?P<method>{method})" + r" " + r"(?P<target>{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P<status_code>{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P<reason>{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P<chunk_size>{chunk_size})" + r"(?P<chunk_ext>{chunk_ext})?" + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespaces, + # we are lenient with trailing whitespaces because some servers in the wild use it. +) diff --git a/myenv/lib/python3.12/site-packages/h11/_connection.py b/myenv/lib/python3.12/site-packages/h11/_connection.py new file mode 100644 index 0000000..d175270 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_connection.py @@ -0,0 +1,633 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. 
If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. + + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: <client state>, SERVER: <server state>} + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`.
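+
+        A minimal usage sketch (illustrative, not from the upstream docs;
+        assumes ``conn`` is a :class:`Connection` that has just finished a
+        request/response cycle)::
+
+            if conn.our_state is DONE and conn.their_state is DONE:
+                conn.start_next_cycle()  # both roles return to IDLE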
+ + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. + old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
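+    # For example (illustrative): when the client sends a Request, the client
+    # state moves IDLE -> SEND_BODY, so the hook below swaps self._writer
+    # over to the body writer that _body_framing() picked for that request.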
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). 
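+                # (Illustrative example: an HTTP/1.0 response body with no
+                # Content-Length is terminated by EOF, so read_eof() on the
+                # Http10Reader returns EndOfMessage(), and the next call here
+                # produces the actual ConnectionClosed() event.)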
+ if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() # type: ignore[attr-defined] + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method has returned :class:`ConnectionClosed`, all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event <events>` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion.
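+
+        Example (an illustrative sketch, not from the upstream docs)::
+
+            conn = Connection(our_role=CLIENT)
+            wire_data = conn.send(Request(method="GET", target="/",
+                                          headers=[("Host", "example.com")]))
+            wire_data += conn.send(EndOfMessage())
+            # wire_data now holds the serialized request, ready for a socket.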
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length.
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/myenv/lib/python3.12/site-packages/h11/_events.py b/myenv/lib/python3.12/site-packages/h11/_events.py new file mode 100644 index 0000000..075bf8a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7230, section 5.3 +<https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte +string.
:term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules +<http_version-format>` for details. + + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type.
+ __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Response headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules +<http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For a + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Response headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules +<http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details.
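+
+    A sketch of typical consumption (illustrative, not from the upstream
+    docs; assumes ``conn`` is a Connection that is kept fed via
+    ``receive_data``, so :data:`NEED_DATA` never occurs here)::
+
+        body = bytearray()
+        while True:
+            event = conn.next_event()
+            if isinstance(event, Data):
+                body += event.data
+            elif isinstance(event, EndOfMessage):
+                break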
+ + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules +<headers-format>` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed of two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/myenv/lib/python3.12/site-packages/h11/_headers.py b/myenv/lib/python3.12/site-packages/h11/_headers.py new file mode 100644 index 0000000..b97d020 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_headers.py @@ -0,0 +1,278 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data."
+# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurrences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(rb"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisons.
+ + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "<Headers(%s)>" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... + + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... + + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)."
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including case insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't matter either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation."
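+    # (Illustrative: an HTTP/1.1 request carrying "Expect: 100-continue"
+    # makes this return True; the same header on an HTTP/1.0 request is
+    # ignored, so it returns False.)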
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/myenv/lib/python3.12/site-packages/h11/_readers.py b/myenv/lib/python3.12/site-packages/h11/_readers.py new file mode 100644 index 0000000..08a9574 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_readers.py @@ -0,0 +1,247 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise 
LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. + self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
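+            # (Illustrative: for the wire bytes b"4;name=val\r\n", the named
+            # groups of chunk_header_re give chunk_size=b"4" and
+            # chunk_ext=b";name=val"; only the size is used below.)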
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/myenv/lib/python3.12/site-packages/h11/_receivebuffer.py b/myenv/lib/python3.12/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..e5c4e08 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. 
Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # Extract an initial slice of the data buffer and return it. + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early, given that it might not even contain `\r\n` at all (hence only a + # timeout will get rid of it). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when the peer sends us a TLS stream where + # we were expecting plain HTTP, given that all versions of TLS so far + # start the handshake with a 0x16 message type code.
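+    # (Illustrative: a TLS ClientHello record begins with byte 0x16, which is
+    # below 0x21, so the check below rejects it immediately instead of
+    # waiting for a "\r\n" that will never arrive.)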
+ def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/myenv/lib/python3.12/site-packages/h11/_state.py b/myenv/lib/python3.12/site-packages/h11/_state.py new file mode 100644 index 0000000..3593430 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/_state.py @@ -0,0 +1,367 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. 
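+#
+# As a concrete example of an event-triggered transition, taken from the
+# EVENT_TRIGGERED_TRANSITIONS table below: when the client machine is in
+# IDLE and the client emits a Request, the client machine moves to
+# SEND_BODY; the only other event it can legally emit from IDLE is
+# ConnectionClosed, which moves it to CLOSED.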
+# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
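+# (The _SWITCH_UPGRADE and _SWITCH_CONNECT sentinels defined below are
+# internal bookkeeping, and are deliberately not part of this list.)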
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
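+        # Proposals are added by process_client_switch_proposal() and the
+        # set is cleared again when a plain (non-switch) Response passes
+        # through process_event().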
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. 
self.states={}".format(self.states)
+            )
+        # Can't reach DONE/DONE with any of these active, but still, let's be
+        # sure.
+        assert self.keep_alive
+        assert not self.pending_switch_proposals
+        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/myenv/lib/python3.12/site-packages/h11/_util.py b/myenv/lib/python3.12/site-packages/h11/_util.py
new file mode 100644
index 0000000..6718445
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/_util.py
@@ -0,0 +1,135 @@
+from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
+
+__all__ = [
+    "ProtocolError",
+    "LocalProtocolError",
+    "RemoteProtocolError",
+    "validate",
+    "bytesify",
+]
+
+
+class ProtocolError(Exception):
+    """Exception indicating a violation of the HTTP/1.1 protocol.
+
+    This is an abstract base class, with two concrete subclasses:
+    :exc:`LocalProtocolError`, which indicates that you tried to do something
+    that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
+    indicates that the remote peer tried to do something that HTTP/1.1 says is
+    illegal. See :ref:`error-handling` for details.
+
+    In addition to the normal :exc:`Exception` features, it has one attribute:
+
+    .. attribute:: error_status_hint
+
+       This gives a suggestion as to what status code a server might use if
+       this error occurred as part of a request.
+
+       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
+       how you might want to respond to a misbehaving peer, if you're
+       implementing a server.
+
+       For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
+       how your peer might have responded to *you* if h11 had allowed you to
+       continue.
+
+       The default is 400 Bad Request, a generic catch-all for protocol
+       violations.
+
+    """
+
+    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
+        if type(self) is ProtocolError:
+            raise TypeError("tried to directly instantiate ProtocolError")
+        Exception.__init__(self, msg)
+        self.error_status_hint = error_status_hint
+
+
+# Strategy: there are a number of public APIs where a LocalProtocolError can
+# be raised (send(), all the different event constructors, ...), and only one
+# public API where RemoteProtocolError can be raised
+# (receive_data()). Therefore we always raise LocalProtocolError internally,
+# and then receive_data will translate this into a RemoteProtocolError.
+#
+# Internally:
+#   LocalProtocolError is the generic "ProtocolError".
+# Externally:
+#   LocalProtocolError is for local errors and RemoteProtocolError is for
+#   remote errors.
class LocalProtocolError(ProtocolError):
+    def _reraise_as_remote_protocol_error(self) -> NoReturn:
+        # After catching a LocalProtocolError, use this method to re-raise it
+        # as a RemoteProtocolError. This method must be called from inside an
+        # except: block.
+        #
+        # An easy way to get an equivalent RemoteProtocolError is just to
+        # modify 'self' in place.
+        self.__class__ = RemoteProtocolError  # type: ignore
+        # But the re-raising is somewhat non-trivial -- you might think that
+        # now that we've modified the in-flight exception object, that just
+        # doing 'raise' to re-raise it would be enough. But it turns out that
+        # this doesn't work, because Python tracks the exception type
+        # (exc_info[0]) separately from the exception object (exc_info[1]),
+        # and we only modified the latter. So we really do need to re-raise
+        # the new type explicitly.
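+        #
+        # (For reference, a minimal sketch of the intended call pattern --
+        # hypothetical caller code; per the strategy note above, the real
+        # call site is h11's receive path:
+        #
+        #     try:
+        #         events = parse_received_bytes()  # may raise LocalProtocolError
+        #     except LocalProtocolError as exc:
+        #         exc._reraise_as_remote_protocol_error()
+        # )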
+        # On py3, the traceback is part of the exception object, so our
+        # in-place modification preserved it and we can just re-raise:
+        raise self
+
+
+class RemoteProtocolError(ProtocolError):
+    pass
+
+
+def validate(
+    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
+) -> Dict[str, bytes]:
+    match = regex.fullmatch(data)
+    if not match:
+        if format_args:
+            msg = msg.format(*format_args)
+        raise LocalProtocolError(msg)
+    return match.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+
+_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
+
+
+class Sentinel(type):
+    def __new__(
+        cls: Type[_T_Sentinel],
+        name: str,
+        bases: Tuple[type, ...],
+        namespace: Dict[str, Any],
+        **kwds: Any
+    ) -> _T_Sentinel:
+        assert bases == (Sentinel,)
+        v = super().__new__(cls, name, bases, namespace, **kwds)
+        v.__class__ = v  # type: ignore
+        return v
+
+    def __repr__(self) -> str:
+        return self.__name__
+
+
+# Used for methods, request targets, HTTP versions, header names, and header
+# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
+# returns bytes.
+def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
+    # Fast-path:
+    if type(s) is bytes:
+        return s
+    if isinstance(s, str):
+        s = s.encode("ascii")
+    if isinstance(s, int):
+        raise TypeError("expected bytes-like object, not int")
+    return bytes(s)
diff --git a/myenv/lib/python3.12/site-packages/h11/_version.py b/myenv/lib/python3.12/site-packages/h11/_version.py
new file mode 100644
index 0000000..4c89113
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/_version.py
@@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+#   http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.14.0"
diff --git a/myenv/lib/python3.12/site-packages/h11/_writers.py b/myenv/lib/python3.12/site-packages/h11/_writers.py
new file mode 100644
index 0000000..939cdb9
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/_writers.py
@@ -0,0 +1,145 @@
+# Code to write HTTP data
+#
+# Strategy: each writer takes an event + a write-some-bytes function, which it
+# calls.
+#
+# WRITERS is a dict describing how to pick a writer.
+# It maps states to either:
+#   - a writer
+#   - or, for body writers, a dict of framing-dependent writer factories
+
+from typing import Any, Callable, Dict, List, Tuple, Type, Union
+
+from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
+from ._headers import Headers
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError, Sentinel
+
+__all__ = ["WRITERS"]
+
+Writer = Callable[[bytes], Any]
+
+
+def write_headers(headers: Headers, write: Writer) -> None:
+    # "Since the Host field-value is critical information for handling a
+    # request, a user agent SHOULD generate Host as the first header field
+    # following the request-line." - RFC 7230
+    raw_items = headers._full_items
+    for raw_name, name, value in raw_items:
+        if name == b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    for raw_name, name, value in raw_items:
+        if name != b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    write(b"\r\n")
+
+
+def write_request(request: Request, write: Writer) -> None:
+    if request.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
+    write_headers(request.headers, write)
+
+
+# Shared between InformationalResponse and Response
+def write_any_response(
+    response: Union[InformationalResponse, Response], write: Writer
+) -> None:
+    if response.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    status_bytes = str(response.status_code).encode("ascii")
+    # We don't bother sending ascii status messages like "OK"; they're
+    # optional and ignored by the protocol. (But the space after the numeric
+    # status code is mandatory.)
+    #
+    # XX FIXME: could at least make an effort to pull out the status message
+    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
+    # (either by import or copy/paste). We already accept them as status codes
+    # since they're of type IntEnum < int.
+    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
+    write_headers(response.headers, write)
+
+
+class BodyWriter:
+    def __call__(self, event: Event, write: Writer) -> None:
+        if type(event) is Data:
+            self.send_data(event.data, write)
+        elif type(event) is EndOfMessage:
+            self.send_eom(event.headers, write)
+        else:  # pragma: no cover
+            assert False
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        pass
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        pass
+
+
+#
+# These are all careful not to do anything to 'data' except call len(data) and
+# write(data). This allows us to transparently pass-through funny objects,
+# like placeholder objects referring to files on disk that will be sent via
+# sendfile(2).
+#
+class ContentLengthWriter(BodyWriter):
+    def __init__(self, length: int) -> None:
+        self._length = length
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        self._length -= len(data)
+        if self._length < 0:
+            raise LocalProtocolError("Too much data for declared Content-Length")
+        write(data)
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        if self._length != 0:
+            raise LocalProtocolError("Too little data for declared Content-Length")
+        if headers:
+            raise LocalProtocolError("Content-Length and trailers don't mix")
+
+
+class ChunkedWriter(BodyWriter):
+    def send_data(self, data: bytes, write: Writer) -> None:
+        # if we encoded 0-length data in the naive way, it would look like an
+        # end-of-message.
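+        # (A zero-length chunk would serialize as b"0\r\n\r\n", which is
+        # exactly the chunked-coding end-of-body marker.)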
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/myenv/lib/python3.12/site-packages/h11/py.typed b/myenv/lib/python3.12/site-packages/h11/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__init__.py b/myenv/lib/python3.12/site-packages/h11/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..bb1f3b6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc new file mode 100644 index 0000000..f01c71c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/helpers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc new file mode 100644 index 0000000..e433f59 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc new file mode 100644 index 0000000..3642e27 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_connection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc new file mode 100644 index 0000000..8ed295c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_events.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc new file mode 100644 index 0000000..5e923e7 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_headers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc new file mode 100644 index 0000000..9936c00 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_helpers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc new file mode 100644 index 0000000..305c005 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_io.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc new file mode 100644 index 0000000..fcd7a8a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc new file mode 100644 index 0000000..875f516 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_state.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc new file mode 100644 index 0000000..ec79368 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/h11/tests/__pycache__/test_util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/h11/tests/data/test-file b/myenv/lib/python3.12/site-packages/h11/tests/data/test-file new file mode 100644 index 0000000..d0be0a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/myenv/lib/python3.12/site-packages/h11/tests/helpers.py b/myenv/lib/python3.12/site-packages/h11/tests/helpers.py new file mode 100644 index 0000000..571be44 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. 
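+# (This lets tests assert on body content without caring how the bytes were
+# split into chunks on the wire.)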
+def normalize_data_events(in_events: List[Event]) -> List[Event]:
+    out_events: List[Event] = []
+    for event in in_events:
+        if type(event) is Data:
+            event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False)
+        if out_events and type(out_events[-1]) is type(event) is Data:
+            out_events[-1] = Data(
+                data=out_events[-1].data + event.data,
+                chunk_start=out_events[-1].chunk_start,
+                chunk_end=out_events[-1].chunk_end,
+            )
+        else:
+            out_events.append(event)
+    return out_events
+
+
+# Given that we want to write tests that push some events through a Connection
+# and check that its state updates appropriately... we might as well make a
+# habit of pushing them through two Connections with a fake network link in
+# between.
+class ConnectionPair:
+    def __init__(self) -> None:
+        self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
+        self.other = {CLIENT: SERVER, SERVER: CLIENT}
+
+    @property
+    def conns(self) -> ValuesView[Connection]:
+        return self.conn.values()
+
+    # expect="match" means we expect the received events to match
+    # send_events; pass expect=[...] to spell out what is expected instead.
+    def send(
+        self,
+        role: Type[Sentinel],
+        send_events: Union[List[Event], Event],
+        expect: Union[List[Event], Event, Literal["match"]] = "match",
+    ) -> bytes:
+        if not isinstance(send_events, list):
+            send_events = [send_events]
+        data = b""
+        closed = False
+        for send_event in send_events:
+            new_data = self.conn[role].send(send_event)
+            if new_data is None:
+                closed = True
+            else:
+                data += new_data
+        # send uses b"" to mean b"", and None to mean closed
+        # receive uses b"" to mean closed, and None to mean "try again"
+        # so we have to translate between the two conventions
+        if data:
+            self.conn[self.other[role]].receive_data(data)
+        if closed:
+            self.conn[self.other[role]].receive_data(b"")
+        got_events = get_all_events(self.conn[self.other[role]])
+        if expect == "match":
+            expect = send_events
+        if not isinstance(expect, list):
+            expect = [expect]
+        assert got_events == expect
+        return data
diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py b/myenv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py
new file mode 100644
index 0000000..d2ee131
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/tests/test_against_stdlib_http.py
@@ -0,0 +1,115 @@
+import json
+import os.path
+import socket
+import socketserver
+import threading
+from contextlib import closing, contextmanager
+from http.server import SimpleHTTPRequestHandler
+from typing import Callable, Generator
+from urllib.request import urlopen
+
+import h11
+
+
+@contextmanager
+def socket_server(
+    handler: Callable[..., socketserver.BaseRequestHandler]
+) -> Generator[socketserver.TCPServer, None, None]:
+    httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
+    thread = threading.Thread(
+        target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
+    )
+    thread.daemon = True
+    try:
+        thread.start()
+        yield httpd
+    finally:
+        httpd.shutdown()
+
+
+test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
+with open(test_file_path, "rb") as f:
+    test_file_data = f.read()
+
+
+class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
+    def translate_path(self, path: str) -> str:
+        return test_file_path
+
+
+def test_h11_as_client() -> None:
+    with socket_server(SingleMindedRequestHandler) as httpd:
+        with closing(socket.create_connection(httpd.server_address)) as s:
+            c = h11.Connection(h11.CLIENT)
+
+            s.sendall(
+                c.send(  # type: ignore[arg-type]
+                    h11.Request(
+                        method="GET", target="/foo",
headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self) -> None: + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + assert request is not None + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server() -> None: + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_connection.py b/myenv/lib/python3.12/site-packages/h11/tests/test_connection.py new file mode 100644 index 0000000..73a27b9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1122 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive() -> None: + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] + ) + + assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + 
Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] + + +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl: Optional[int] = None, te: bool = False) -> Request: + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length() -> None: + with pytest.raises(ValueError): + Connection("CLIENT") # type: ignore + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: 
SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked() -> None: + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries() -> None: + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client() -> None: + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically 
Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response() -> None: + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + user_headers = cast(List[Tuple[str, str]], user_headers) + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling() -> None: + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... 
+ [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue() -> None: + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure() -> None: + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to create a way-too-big receive buffer... 
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple() -> None: + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + + +def test_pipelining() -> None: + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() 
is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch() -> None: + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. 
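+        # (Rationale: once a protocol switch is possible, any further bytes,
+        # and even the EOF itself, may belong to the new protocol, so h11
+        # reports PAUSED and leaves them in trailing_data instead.)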
+ p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple() -> None: + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states() -> None: + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + 
p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close() -> None: + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile() -> None: + class SendfilePlaceholder: + def __len__(self) -> int: + return 10 + + placeholder = SendfilePlaceholder() + + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] # type: ignore + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore + + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore + assert c.our_state is SEND_BODY + + +def test_errors() -> None: + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is 
especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role: Type[Sentinel]) -> Connection: + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing() -> None: + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop() -> None: + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout() -> None: + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) + + +# This used to raise IndexError +def test_empty_request() -> None: + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data: bytes) -> None: + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data: bytes) -> None: + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. 
+# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. +def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body() -> None: + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" # type: ignore + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_events.py b/myenv/lib/python3.12/site-packages/h11/tests/test_events.py new file mode 100644 index 0000000..bc6c313 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/test_events.py @@ -0,0 +1,150 @@ +from http import HTTPStatus + +import pytest + +from .. 
import _events +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._util import LocalProtocolError + + +def test_events() -> None: + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_byte in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_byte) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code() -> None: + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] + assert r.status_code == HTTPStatus.OK + assert 
type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing() -> None: + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_headers.py b/myenv/lib/python3.12/site-packages/h11/tests/test_headers.py new file mode 100644 index 0000000..ba53d08 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/test_headers.py @@ -0,0 +1,157 @@ +import pytest + +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + 
with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header() -> None: + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue() -> None: + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_helpers.py b/myenv/lib/python3.12/site-packages/h11/tests/test_helpers.py new file mode 100644 index 0000000..c329c76 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,32 @@ +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events + + +def test_normalize_data_events() -> None: + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_io.py b/myenv/lib/python3.12/site-packages/h11/tests/test_io.py new file mode 100644 index 0000000..2b47c0e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/h11/tests/test_io.py @@ -0,0 +1,572 @@ +from typing import Any, Callable, Generator, List + 
+import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer: Any, obj: Any, expected: Any) -> None: + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data: bytes) -> ReceiveBuffer: + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual() -> None: + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual() -> None: + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n",
+        Response(
+            status_code=200,
+            headers=[("Foo", "a a a a a")],
+            http_version="1.0",
+            reason=b"OK",
+        ),
+    )
+
+    # Empty headers -- also legal
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n",
+        Response(
+            status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
+        ),
+    )
+
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n",
+        Response(
+            status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
+        ),
+    )
+
+    # Tolerate broken servers that leave off the reason phrase
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n",
+        Response(
+            status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b""
+        ),
+    )
+
+    # Tolerate header line endings (\r\n and \n)
+    # \n\r\n between headers and body
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n",
+        Response(
+            status_code=200,
+            headers=[("SomeHeader", "val")],
+            http_version="1.1",
+            reason="OK",
+        ),
+    )
+
+    # delimited only with \n
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n",
+        Response(
+            status_code=200,
+            headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
+            http_version="1.1",
+            reason="OK",
+        ),
+    )
+
+    # mixed \r\n and \n
+    tr(
+        READERS[SERVER, SEND_RESPONSE],
+        b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n",
+        Response(
+            status_code=200,
+            headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
+            http_version="1.1",
+            reason="OK",
+        ),
+    )
+
+    # obsolete line folding
+    tr(
+        READERS[CLIENT, IDLE],
+        b"HEAD /foo HTTP/1.1\r\n"
+        b"Host: example.com\r\n"
+        b"Some: multi-line\r\n"
+        b" header\r\n"
+        b"\tnonsense\r\n"
+        b" \t \t\tI guess\r\n"
+        b"Connection: close\r\n"
+        b"More-nonsense: in the\r\n"
+        b" last header \r\n\r\n",
+        Request(
+            method="HEAD",
+            target="/foo",
+            headers=[
+                ("Host", "example.com"),
+                ("Some", "multi-line header nonsense I guess"),
+                ("Connection", "close"),
+                ("More-nonsense", "in the last header"),
+            ],
+        ),
+    )
+
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n",
+            None,
+        )
+
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n",
+            None,
+        )
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
+            None,
+        )
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
+            None,
+        )
+    with pytest.raises(LocalProtocolError):
+        tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None)
+
+
+def test__obsolete_line_fold_bytes() -> None:
+    # _obsolete_line_fold has a defensive cast to bytearray, which is
+    # necessary to protect against O(n^2) behavior in case anyone ever passes
+    # in regular bytestrings... but right now we never pass in regular
+    # bytestrings. so this test just exists to get some coverage on that
+    # defensive cast.
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args: Any) -> List[Event]: + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader() -> None: + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader() -> None: + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader() -> None: + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + t_body_reader( + ChunkedReader, + b"5 \r\n01234\r\n" + b"0\r\n\r\n", + [Data(data=b"01234"), EndOfMessage()], + ) + + +def test_ContentLengthWriter() -> None: + w = ContentLengthWriter(5) + 
assert dowrite(w, Data(data=b"123")) == b"123"
+    assert dowrite(w, Data(data=b"45")) == b"45"
+    assert dowrite(w, EndOfMessage()) == b""
+
+    w = ContentLengthWriter(5)
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, Data(data=b"123456"))
+
+    w = ContentLengthWriter(5)
+    dowrite(w, Data(data=b"123"))
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, Data(data=b"456"))
+
+    w = ContentLengthWriter(5)
+    dowrite(w, Data(data=b"123"))
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage())
+
+    w = ContentLengthWriter(5)
+    assert dowrite(w, Data(data=b"123")) == b"123"
+    assert dowrite(w, Data(data=b"45")) == b"45"
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_ChunkedWriter() -> None:
+    w = ChunkedWriter()
+    assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
+    assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"
+
+    assert dowrite(w, Data(data=b"")) == b""
+
+    assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"
+
+    assert (
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
+        == b"0\r\nEtag: asdf\r\na: b\r\n\r\n"
+    )
+
+
+def test_Http10Writer() -> None:
+    w = Http10Writer()
+    assert dowrite(w, Data(data=b"1234")) == b"1234"
+    assert dowrite(w, EndOfMessage()) == b""
+
+    with pytest.raises(LocalProtocolError):
+        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
+
+
+def test_reject_garbage_after_response_line() -> None:
+    with pytest.raises(LocalProtocolError):
+        tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)
+
+
+def test_reject_garbage_after_request_line() -> None:
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
+            None,
+        )
+
+
+def test_reject_garbage_in_header_line() -> None:
+    with pytest.raises(LocalProtocolError):
+        tr(
+            READERS[CLIENT, IDLE],
+            b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
+            None,
+        )
+
+
+def test_reject_non_vchar_in_path() -> None:
+    for bad_char in b"\x00\x20\x7f\xee":
+        message = bytearray(b"HEAD /")
+        message.append(bad_char)
+        message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
+        with pytest.raises(LocalProtocolError):
+            tr(READERS[CLIENT, IDLE], message, None)
+
+
+# https://github.com/python-hyper/h11/issues/57
+def test_allow_some_garbage_in_cookies() -> None:
+    tr(
+        READERS[CLIENT, IDLE],
+        b"HEAD /foo HTTP/1.1\r\n"
+        b"Host: foo\r\n"
+        b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
+        b"\r\n",
+        Request(
+            method="HEAD",
+            target="/foo",
+            headers=[
+                ("Host", "foo"),
+                ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
+            ],
+        ),
+    )
+
+
+def test_host_comes_first() -> None:
+    tw(
+        write_headers,
+        normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
+        b"Host: example.com\r\nfoo: bar\r\n\r\n",
+    )
diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py b/myenv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py
new file mode 100644
index 0000000..21a3870
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/tests/test_receivebuffer.py
@@ -0,0 +1,135 @@
+import re
+from typing import Tuple
+
+import pytest
+
+from .._receivebuffer import ReceiveBuffer
+
+
+def test_receivebuffer() -> None:
+    b = ReceiveBuffer()
+    assert not b
+    assert len(b) == 0
+    assert bytes(b) == b""
+
+    b += b"123"
+    assert b
+    assert len(b) == 3
+    assert bytes(b) == b"123"
+
+    assert bytes(b) == b"123"
+
+    assert b.maybe_extract_at_most(2) == b"12"
+    assert b
+    assert len(b) == 1
+    assert bytes(b) == b"3"
+
+    assert bytes(b) == b"3"
+
+    assert b.maybe_extract_at_most(10) == b"3"
+    assert bytes(b) == b""
+
+    assert b.maybe_extract_at_most(10) is None
+    assert not b
+
+    ################################################################
+    # maybe_extract_next_line
+    ################################################################
+
+    b += b"123\n456\r\n789\r\n"
+
+    assert b.maybe_extract_next_line() == b"123\n456\r\n"
+    assert bytes(b) == b"789\r\n"
+
+    assert b.maybe_extract_next_line() == b"789\r\n"
+    assert bytes(b) == b""
+
+    b += b"12\r"
+    assert b.maybe_extract_next_line() is None
+    assert bytes(b) == b"12\r"
+
+    b += b"345\n\r"
+    assert b.maybe_extract_next_line() is None
+    assert bytes(b) == b"12\r345\n\r"
+
+    # here we stopped in the middle of the b"\r\n" delimiter
+
+    b += b"\n6789aaa123\r\n"
+    assert b.maybe_extract_next_line() == b"12\r345\n\r\n"
+    assert b.maybe_extract_next_line() == b"6789aaa123\r\n"
+    assert b.maybe_extract_next_line() is None
+    assert bytes(b) == b""
+
+    ################################################################
+    # maybe_extract_lines
+    ################################################################
+
+    b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing"
+    lines = b.maybe_extract_lines()
+    assert lines == [b"123", b"a: b", b"foo:bar"]
+    assert bytes(b) == b"trailing"
+
+    assert b.maybe_extract_lines() is None
+
+    b += b"\r\n\r"
+    assert b.maybe_extract_lines() is None
+
+    assert b.maybe_extract_at_most(100) == b"trailing\r\n\r"
+    assert not b
+
+    # Empty body case (as happens at the end of chunked encoding if there are
+    # no trailing headers, e.g.)
+    b += b"\r\ntrailing"
+    assert b.maybe_extract_lines() == []
+    assert bytes(b) == b"trailing"
+
+
+@pytest.mark.parametrize(
+    "data",
+    [
+        pytest.param(
+            (
+                b"HTTP/1.1 200 OK\r\n",
+                b"Content-type: text/plain\r\n",
+                b"Connection: close\r\n",
+                b"\r\n",
+                b"Some body",
+            ),
+            id="with_crlf_delimiter",
+        ),
+        pytest.param(
+            (
+                b"HTTP/1.1 200 OK\n",
+                b"Content-type: text/plain\n",
+                b"Connection: close\n",
+                b"\n",
+                b"Some body",
+            ),
+            id="with_lf_only_delimiter",
+        ),
+        pytest.param(
+            (
+                b"HTTP/1.1 200 OK\n",
+                b"Content-type: text/plain\r\n",
+                b"Connection: close\n",
+                b"\n",
+                b"Some body",
+            ),
+            id="with_mixed_crlf_and_lf",
+        ),
+    ],
+)
+def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None:
+    b = ReceiveBuffer()
+
+    for line in data:
+        b += line
+
+    lines = b.maybe_extract_lines()
+
+    assert lines == [
+        b"HTTP/1.1 200 OK",
+        b"Content-type: text/plain",
+        b"Connection: close",
+    ]
+    assert bytes(b) == b"Some body"
diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_state.py b/myenv/lib/python3.12/site-packages/h11/tests/test_state.py
new file mode 100644
index 0000000..bc974e6
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/tests/test_state.py
@@ -0,0 +1,271 @@
+import pytest
+
+from .._events import (
+    ConnectionClosed,
+    Data,
+    EndOfMessage,
+    Event,
+    InformationalResponse,
+    Request,
+    Response,
+)
+from .._state import (
+    _SWITCH_CONNECT,
+    _SWITCH_UPGRADE,
+    CLIENT,
+    CLOSED,
+    ConnectionState,
+    DONE,
+    IDLE,
+    MIGHT_SWITCH_PROTOCOL,
+    MUST_CLOSE,
+    SEND_BODY,
+    SEND_RESPONSE,
+    SERVER,
+    SWITCHED_PROTOCOL,
+)
+from .._util import LocalProtocolError
+
+
+def test_ConnectionState() -> None:
+    cs = ConnectionState()
+
+    # Basic event-triggered transitions
+
+    assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
+
+    cs.process_event(CLIENT, Request)
+    # The SERVER-Request special case:
+    assert cs.states ==
{CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive() -> None: + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE() -> None: + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied() -> None: + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted() -> None: + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch() -> None: 
+    # CONNECT + Upgrade is legal! Very silly, but legal. So we support
+    # it. Because sometimes doing the silly thing is easier than not.
+    for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]:
+        cs = ConnectionState()
+        cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+        cs.process_client_switch_proposal(_SWITCH_CONNECT)
+        cs.process_event(CLIENT, Request)
+        cs.process_event(CLIENT, EndOfMessage)
+        assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
+        cs.process_event(
+            SERVER, _response_type_for_switch[server_switch], server_switch
+        )
+        if server_switch is None:
+            assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
+        else:
+            assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
+
+
+def test_ConnectionState_inconsistent_protocol_switch() -> None:
+    for client_switches, server_switch in [
+        ([], _SWITCH_CONNECT),
+        ([], _SWITCH_UPGRADE),
+        ([_SWITCH_UPGRADE], _SWITCH_CONNECT),
+        ([_SWITCH_CONNECT], _SWITCH_UPGRADE),
+    ]:
+        cs = ConnectionState()
+        for client_switch in client_switches:  # type: ignore[attr-defined]
+            cs.process_client_switch_proposal(client_switch)
+        cs.process_event(CLIENT, Request)
+        with pytest.raises(LocalProtocolError):
+            cs.process_event(SERVER, Response, server_switch)
+
+
+def test_ConnectionState_keepalive_protocol_switch_interaction() -> None:
+    # keep_alive=False + pending_switch_proposals
+    cs = ConnectionState()
+    cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+    cs.process_event(CLIENT, Request)
+    cs.process_keep_alive_disabled()
+    cs.process_event(CLIENT, Data)
+    assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
+
+    # the protocol switch "wins"
+    cs.process_event(CLIENT, EndOfMessage)
+    assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
+
+    # but when the server denies the request, keep_alive comes back into play
+    cs.process_event(SERVER, Response)
+    assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY}
+
+
+def test_ConnectionState_reuse() -> None:
+    cs = ConnectionState()
+
+    with pytest.raises(LocalProtocolError):
+        cs.start_next_cycle()
+
+    cs.process_event(CLIENT, Request)
+    cs.process_event(CLIENT, EndOfMessage)
+
+    with pytest.raises(LocalProtocolError):
+        cs.start_next_cycle()
+
+    cs.process_event(SERVER, Response)
+    cs.process_event(SERVER, EndOfMessage)
+
+    cs.start_next_cycle()
+    assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
+
+    # No keepalive
+
+    cs.process_event(CLIENT, Request)
+    cs.process_keep_alive_disabled()
+    cs.process_event(CLIENT, EndOfMessage)
+    cs.process_event(SERVER, Response)
+    cs.process_event(SERVER, EndOfMessage)
+
+    with pytest.raises(LocalProtocolError):
+        cs.start_next_cycle()
+
+    # One side closed
+
+    cs = ConnectionState()
+    cs.process_event(CLIENT, Request)
+    cs.process_event(CLIENT, EndOfMessage)
+    cs.process_event(CLIENT, ConnectionClosed)
+    cs.process_event(SERVER, Response)
+    cs.process_event(SERVER, EndOfMessage)
+
+    with pytest.raises(LocalProtocolError):
+        cs.start_next_cycle()
+
+    # Successful protocol switch
+
+    cs = ConnectionState()
+    cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+    cs.process_event(CLIENT, Request)
+    cs.process_event(CLIENT, EndOfMessage)
+    cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE)
+
+    with pytest.raises(LocalProtocolError):
+        cs.start_next_cycle()
+
+    # Failed protocol switch
+
+    cs = ConnectionState()
+    cs.process_client_switch_proposal(_SWITCH_UPGRADE)
+    cs.process_event(CLIENT, Request)
+    cs.process_event(CLIENT, EndOfMessage)
+    cs.process_event(SERVER, Response)
+    cs.process_event(SERVER, EndOfMessage)
+
+    cs.start_next_cycle()
+    assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
+
+
+def test_server_request_is_illegal() -> None:
+    # There used to be a bug in how we handled the Request special case that
+    # made this allowed...
+    cs = ConnectionState()
+    with pytest.raises(LocalProtocolError):
+        cs.process_event(SERVER, Request)
diff --git a/myenv/lib/python3.12/site-packages/h11/tests/test_util.py b/myenv/lib/python3.12/site-packages/h11/tests/test_util.py
new file mode 100644
index 0000000..79bc095
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/h11/tests/test_util.py
@@ -0,0 +1,112 @@
+import re
+import sys
+import traceback
+from typing import NoReturn
+
+import pytest
+
+from .._util import (
+    bytesify,
+    LocalProtocolError,
+    ProtocolError,
+    RemoteProtocolError,
+    Sentinel,
+    validate,
+)
+
+
+def test_ProtocolError() -> None:
+    with pytest.raises(TypeError):
+        ProtocolError("abstract base class")
+
+
+def test_LocalProtocolError() -> None:
+    try:
+        raise LocalProtocolError("foo")
+    except LocalProtocolError as e:
+        assert str(e) == "foo"
+        assert e.error_status_hint == 400
+
+    try:
+        raise LocalProtocolError("foo", error_status_hint=418)
+    except LocalProtocolError as e:
+        assert str(e) == "foo"
+        assert e.error_status_hint == 418
+
+    def thunk() -> NoReturn:
+        raise LocalProtocolError("a", error_status_hint=420)
+
+    try:
+        try:
+            thunk()
+        except LocalProtocolError as exc1:
+            orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
+            exc1._reraise_as_remote_protocol_error()
+    except RemoteProtocolError as exc2:
+        assert type(exc2) is RemoteProtocolError
+        assert exc2.args == ("a",)
+        assert exc2.error_status_hint == 420
+        new_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
+        assert new_traceback.endswith(orig_traceback)
+
+
+def test_validate() -> None:
+    my_re = re.compile(rb"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)")
+    with pytest.raises(LocalProtocolError):
+        validate(my_re, b"0.")
+
+    groups = validate(my_re, b"0.1")
+    assert groups == {"group1": b"0", "group2": b"1"}
+
+    # successful partial matches are an error - must match whole string
+    with pytest.raises(LocalProtocolError):
+        validate(my_re, b"0.1xx")
+    with pytest.raises(LocalProtocolError):
+        validate(my_re, b"0.1\n")
+
+
+def test_validate_formatting() -> None:
+    my_re = re.compile(rb"foo")
+
+    with pytest.raises(LocalProtocolError) as excinfo:
+        validate(my_re, b"", "oops")
+    assert "oops" in str(excinfo.value)
+
+    with pytest.raises(LocalProtocolError) as excinfo:
+        validate(my_re, b"", "oops {}")
+    assert "oops {}" in str(excinfo.value)
+
+    with pytest.raises(LocalProtocolError) as excinfo:
+        validate(my_re, b"", "oops {} xx", 10)
+    assert "oops 10 xx" in str(excinfo.value)
+
+
+def test_make_sentinel() -> None:
+    class S(Sentinel, metaclass=Sentinel):
+        pass
+
+    assert repr(S) == "S"
+    assert S == S
+    assert type(S).__name__ == "S"
+    assert S in {S}
+    assert type(S) is S
+
+    class S2(Sentinel, metaclass=Sentinel):
+        pass
+
+    assert repr(S2) == "S2"
+    assert S != S2
+    assert S not in {S2}
+    assert type(S) is not type(S2)
+
+
+def test_bytesify() -> None:
+    assert bytesify(b"123") == b"123"
+    assert bytesify(bytearray(b"123")) == b"123"
+    assert bytesify("123") == b"123"
+
+    with pytest.raises(UnicodeEncodeError):
+        bytesify("\u1234")
+
+    with pytest.raises(TypeError):
+        bytesify(10)
diff --git a/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/METADATA b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/METADATA
new file mode 100644
index 0000000..0e20095
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/METADATA
@@ -0,0 +1,607 @@
+Metadata-Version: 2.3
+Name: httpcore
+Version: 1.0.5
+Summary: A minimal low-level HTTP client.
+Project-URL: Documentation, https://www.encode.io/httpcore
+Project-URL: Homepage, https://www.encode.io/httpcore/
+Project-URL: Source, https://github.com/encode/httpcore
+Author-email: Tom Christie <tom@tomchristie.com>
+License-Expression: BSD-3-Clause
+License-File: LICENSE.md
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Web Environment
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Trio
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.8
+Requires-Dist: certifi
+Requires-Dist: h11<0.15,>=0.13
+Provides-Extra: asyncio
+Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio'
+Provides-Extra: http2
+Requires-Dist: h2<5,>=3; extra == 'http2'
+Provides-Extra: socks
+Requires-Dist: socksio==1.*; extra == 'socks'
+Provides-Extra: trio
+Requires-Dist: trio<0.26.0,>=0.22.0; extra == 'trio'
+Description-Content-Type: text/markdown
+
+# HTTP Core
+
+[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions)
+[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/)
+
+> *Do one thing, and do it well.*
+
+The HTTP Core package provides a minimal low-level HTTP client, which does
+one thing only: sending HTTP requests.
+
+It does not provide any high level model abstractions over the API,
+does not handle redirects, multipart uploads, building authentication headers,
+transparent HTTP caching, URL parsing, session cookie handling,
+content or charset decoding, handling JSON, environment based configuration
+defaults, or any of that Jazz.
+
+Some things HTTP Core does do:
+
+* Sending HTTP requests.
+* Thread-safe / task-safe connection pooling.
+* HTTP(S) proxy & SOCKS proxy support.
+* Supports HTTP/1.1 and HTTP/2.
+* Provides both sync and async interfaces.
+* Async backend support for `asyncio` and `trio`.
+
+## Requirements
+
+Python 3.8+
+
+## Installation
+
+For HTTP/1.1 only support, install with:
+
+```shell
+$ pip install httpcore
+```
+
+There are also a number of optional extras available...
+
+```shell
+$ pip install httpcore['asyncio,trio,http2,socks']
+```
+
+# Sending requests
+
+Send an HTTP request:
+
+```python
+import httpcore
+
+response = httpcore.request("GET", "https://www.example.com/")
+
+print(response)
+# <Response [200]>
+print(response.status)
+# 200
+print(response.headers)
+# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...]
+print(response.content)
+# b'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>\n\n<meta charset="utf-8"/>\n ...'
+```
+
+The top-level `httpcore.request()` function is provided for convenience. In practice, whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides.
+
+```python
+import httpcore
+
+http = httpcore.ConnectionPool()
+response = http.request("GET", "https://www.example.com/")
+```
+
+Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/).
+
+## Motivation
+
+You *probably* don't want to be using HTTP Core directly. It might make sense if
+you're writing something like a proxy service in Python, and you just want
+something at the lowest possible level, but more typically you'll want to use
+a higher level client library, such as `httpx`.
+
+The motivation for `httpcore` is:
+
+* To provide a reusable low-level client library, that other packages can then build on top of.
+* To provide a *really clear interface split* between the networking code and client logic,
+  so that each is easier to understand and reason about in isolation.
+
+## Dependencies
+
+The `httpcore` package has the following dependencies...
+
+* `h11`
+* `certifi`
+
+And the following optional extras...
+
+* `anyio` - Required by `pip install httpcore['asyncio']`.
+* `trio` - Required by `pip install httpcore['trio']`.
+* `h2` - Required by `pip install httpcore['http2']`.
+* `socksio` - Required by `pip install httpcore['socks']`.
+
+## Versioning
+
+We use [SEMVER for our versioning policy](https://semver.org/).
+
+For changes between package versions please see our [project changelog](CHANGELOG.md).
+
+We recommend pinning your requirements to either the most current major version, or a more specific version range:
+
+```shell
+pip install 'httpcore==1.*'
+```
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## 1.0.5 (March 27th, 2024)
+
+- Handle `EndOfStream` exception for anyio backend. (#899)
+- Allow trio `0.25.*` series in package dependencies. (#903)
+
+## 1.0.4 (February 21st, 2024)
+
+- Add `target` request extension. (#888)
+- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882)
+
+## 1.0.3 (February 13th, 2024)
+
+- Fix support for async cancellations. (#880)
+- Fix trace extension when used with socks proxy. (#849)
+- Fix SSL context for connections using the "wss" scheme (#869)
+
+## 1.0.2 (November 10th, 2023)
+
+- Fix `float("inf")` timeouts in `Event.wait` function. (#846)
+
+## 1.0.1 (November 3rd, 2023)
+
+- Fix pool timeout to account for the total time spent retrying. (#823)
+- Raise a neater RuntimeError when the correct async deps are not installed. (#826)
+- Add support for synchronous TLS-in-TLS streams. (#840)
+
+## 1.0.0 (October 6th, 2023)
+
+From version 1.0 our async support is now optional, as the package has minimal dependencies by default.
+
+For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`.
+
+The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/.
+
+- Async support becomes fully optional. (#809)
+- Add support for Python 3.12. (#807)
+
+## 0.18.0 (September 8th, 2023)
+
+- Add support for HTTPS proxies. (#745, #786)
+- Drop Python 3.7 support. (#727)
+- Handle `sni_hostname` extension with SOCKS proxy. (#774)
+- Handle HTTP/1.1 half-closed connections gracefully. (#641)
+- Change the type of `Extensions` from `Mapping[str, Any]` to `MutableMapping[str, Any]`. (#762)
+
+## 0.17.3 (July 5th, 2023)
+
+- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726)
+- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation details are now part of the top-level public API. (#699)
+- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730)
+- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717)
+- Drop Python 3.7 support. (#727)
+
+## 0.17.2 (May 23rd, 2023)
+
+- Add `socket_options` argument to `ConnectionPool` and `HTTPProxy` classes. (#668)
+- Improve logging with per-module logger names. (#690)
+- Add `sni_hostname` request extension. (#696)
+- Resolve race condition during import of `anyio` package. (#692)
+- Enable TCP_NODELAY for all synchronous sockets. (#651)
+
+## 0.17.1 (May 17th, 2023)
+
+- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669)
+- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678)
+- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679)
+- Fix edge-case exception when removing requests from the connection pool. (#680)
+- Fix pool timeout edge-case. (#688)
+
+## 0.17.0 (March 16th, 2023)
+
+- Add DEBUG level logging. (#648)
+- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652)
+- Increase the allowable HTTP header size to 100kB. (#647)
+- Add `retries` option to SOCKS proxy classes. (#643)
+
+## 0.16.3 (December 20th, 2022)
+
+- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625)
+- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637)
+- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631)
+- Lazy import `anyio`, so that it's no longer a hard dependency, and isn't imported if unused. (#639)
+
+## 0.16.2 (November 25th, 2022)
+
+- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627)
+- Raise `RuntimeError` if attempting to use UNIX domain sockets on Windows. (#619)
+
+## 0.16.1 (November 17th, 2022)
+
+- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605)
+
+## 0.16.0 (October 11th, 2022)
+
+- Support HTTP/1.1 informational responses. (#581)
+- Fix async cancellation behaviour. (#580)
+- Support `h11` 0.14. (#579)
+
+## 0.15.0 (May 17th, 2022)
+
+- Drop Python 3.6 support (#535)
+- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506)
+- Switch to explicit `typing.Optional` for type hints. (#513)
+- For `trio` map OSError exceptions to `ConnectError`. (#543)
+
+## 0.14.7 (February 4th, 2022)
+
+- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502)
+- Fix AttributeError that happened when `Socks5Connection` instances were terminated. (#501)
+
+## 0.14.6 (February 1st, 2022)
+
+- Fix SOCKS support for `http://` URLs. (#492)
+- Resolve race condition around exceptions during streaming a response. (#491)
+
+## 0.14.5 (January 18th, 2022)
+
+- SOCKS proxy support. (#478)
+- Add `proxy_auth` argument to `HTTPProxy`. (#481)
+- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479)
+
+## 0.14.4 (January 5th, 2022)
+
+- Support HTTP/2 on HTTPS tunnelling proxies. (#468)
+- Fix proxy headers missing on HTTP forwarding. (#456)
+- Only instantiate SSL context if required. (#457)
+- More robust HTTP/2 handling. (#253, #439, #440, #441)
+
+## 0.14.3 (November 17th, 2021)
+
+- Fix race condition when removing closed connections from the pool. (#437)
+
+## 0.14.2 (November 16th, 2021)
+
+- Failed connections no longer remain in the pool. (Pull #433)
+
+## 0.14.1 (November 12th, 2021)
+
+- `max_connections` becomes optional. (Pull #429)
+- `certifi` is now included in the install dependencies. (Pull #428)
+- `h2` is now strictly optional. (Pull #428)
+
+## 0.14.0 (November 11th, 2021)
+
+The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly.
+
+Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage.
+
+See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background.
+
+There are some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle (a usage sketch follows the 0.13.1 notes below). This extension is needed for the HTTPX CLI, in order to...
+
+* Log the point at which the connection is established, and the IP/port on which it is made.
+* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the `--http2` flag was passed. (Which may not actually be true.)
+* Log SSL version info / certificate info.
+
+Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.
+
+## 0.13.7 (September 13th, 2021)
+
+- Fix broken error messaging when URL scheme is missing, or a non-HTTP(S) scheme is used. (Pull #403)
+
+## 0.13.6 (June 15th, 2021)
+
+### Fixed
+
+- Close sockets when read or write timeouts occur. (Pull #365)
+
+## 0.13.5 (June 14th, 2021)
+
+### Fixed
+
+- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
+
+## 0.13.4 (June 9th, 2021)
+
+### Added
+
+- Improved error messaging when URL scheme is missing, or a non-HTTP(S) scheme is used. (Pull #354)
+
+### Fixed
+
+- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
+
+## 0.13.3 (May 6th, 2021)
+
+### Added
+
+- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
+
+### Fixed
+
+- Handle cases where environment does not provide `select.poll` support. (Pull #331)
+
+## 0.13.2 (April 29th, 2021)
+
+### Added
+
+- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
+
+## 0.13.1 (April 28th, 2021)
+
+### Fixed
+
+- More resilient testing for closed connections. (Pull #311)
+- Don't raise exceptions on ungraceful connection closes. (Pull #310)
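+
+As referenced in the 0.14.0 notes above, the "trace" extension provides the hook into internal events. A minimal sketch, assuming the current top-level API, where the callback receives an event name plus an info dict for each internal event:
+
+```python
+import httpcore
+
+def log(event_name, info):
+    # Prints events such as "connection.connect_tcp.started" together with
+    # their associated information.
+    print(event_name, info)
+
+response = httpcore.request(
+    "GET", "https://www.example.com/", extensions={"trace": log}
+)
+```
+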
+
+## 0.13.0 (April 21st, 2021)
+
+The 0.13 release updates the core API in order to match the HTTPX Transport API,
+introduced in HTTPX 0.18 onwards.
+
+An example of making requests with the new interface is:
+
+```python
+with httpcore.SyncConnectionPool() as http:
+    status_code, headers, stream, extensions = http.handle_request(
+        method=b'GET',
+        url=(b'https', b'example.org', 443, b'/'),
+        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
+        stream=httpcore.ByteStream(b''),
+        extensions={}
+    )
+    body = stream.read()
+    print(status_code, body)
+```
+
+### Changed
+
+- The `.request()` method is now `handle_request()`. (Pull #296)
+- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
+- The `headers` argument is no longer optional. (Pull #296)
+- The `stream` argument is no longer optional. (Pull #296)
+- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
+- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
+- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
+- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
+
+### Added
+
+- Streams now support a `.read()` interface. (Pull #296)
+
+### Fixed
+
+- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
+
+## 0.12.3 (December 7th, 2020)
+
+### Fixed
+
+- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
+- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
+- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
+- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
+- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
+
+## 0.12.2 (November 20th, 2020)
+
+### Fixed
+
+- Properly wrap connect errors on the asyncio backend. (Pull #235)
+- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
+
+## 0.12.1 (November 7th, 2020)
+
+### Added
+
+- Add connect retries. (Pull #221)
+
+### Fixed
+
+- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
+- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
+- Properly wrap OS errors when using `trio`. (Pull #225)
+
+## 0.12.0 (October 6th, 2020)
+
+### Changed
+
+- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
+
+### Added
+
+- Add Python 3.9 to officially supported versions.
+
+### Fixed
+
+- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
+
+## 0.11.1 (September 28th, 2020)
+
+### Fixed
+
+- Add await to async semaphore release() coroutine. (#197)
+- Drop incorrect curio classifier. (#192)
+
+## 0.11.0 (September 22nd, 2020)
+
+The Transport API with 0.11.0 has a couple of significant changes.
+
+Firstly, we've changed the request interface in order to allow extensions, which will later enable us to support features
+such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
+
+The interface changes from:
+
+```python
+def request(method, url, headers, stream, timeout):
+    return (http_version, status_code, reason, headers, stream)
+```
+
+To instead include an optional dictionary of extensions on the request and response:
+
+```python
+def request(method, url, headers, stream, ext):
+    return (status_code, headers, stream, ext)
+```
+
+Having an open-ended extension point will allow us to later add support for various optional features that wouldn't otherwise be supported without these API changes.
+
+In particular:
+
+* Trailing headers support.
+* HTTP/2 server push.
+* sendfile.
+* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
+* Exposing debug information out of the API, including template name, template context.
+
+Currently extensions are limited to:
+
+* request: `timeout` - Optional. Timeout dictionary.
+* response: `http_version` - Optional. Include the HTTP version used on the response.
+* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
+
+See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
+
+Secondly, the async version of `request` is now namespaced as `arequest`.
+
+This allows concrete transports to support both sync and async implementations on the same class.
+
+### Added
+
+- Add curio support. (Pull #168)
+- Add anyio support, with `backend="anyio"`. (Pull #169)
+
+### Changed
+
+- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
+- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)
+
+## 0.10.2 (August 20th, 2020)
+
+### Added
+
+- Added Unix Domain Socket support. (Pull #139)
+
+### Fixed
+
+- Always include the port on proxy CONNECT requests. (Pull #154)
+- Fix `max_keepalive_connections` configuration. (Pull #153)
+- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
+
+## 0.10.1 (August 7th, 2020)
+
+- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
+
+## 0.10.0 (August 7th, 2020)
+
+The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional (a sketch with the present-day API follows the 0.9.0 notes below).
+
+Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
+
+### Added
+
+- HTTP/2 support becomes optional. (Pull #121, #130)
+- Add `local_address=...` support. (Pull #100, #134)
+- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
+- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
+- Add `UnsupportedProtocol` exception. (Pull #128)
+- Add `.get_connection_info()` method. (Pull #102, #137)
+- Add better TRACE logs. (Pull #101)
+
+### Changed
+
+- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
+
+### Fixed
+
+- Improve handling of server disconnects. (Pull #112)
+
+## 0.9.1 (May 27th, 2020)
+
+### Fixed
+
+- Proper host resolution for sync case, including IPv6 support. (Pull #97)
+- Close outstanding connections when connection pool is closed. (Pull #98)
+
+## 0.9.0 (May 21st, 2020)
+
+### Changed
+
+- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
+
+### Fixed
+
+- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
+- Remove incorrect debug log. (Pull #83)
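+
+As flagged in the 0.10.0 notes above, the optional HTTP/2 support carries through to the current API, where it is enabled per connection pool. A sketch, assuming the `h2` extra is installed:
+
+```python
+import httpcore
+
+# Requires `pip install 'httpcore[http2]'`.
+pool = httpcore.ConnectionPool(http2=True)
+response = pool.request("GET", "https://www.example.com/")
+print(response.extensions["http_version"])  # b'HTTP/2' when negotiated.
+```
+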
+
+## 0.8.4 (May 11th, 2020)
+
+### Added
+
+- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables
+and TRACE level logging. (Pull #79)
+
+### Fixed
+
+- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)
+
+## 0.8.3 (May 6th, 2020)
+
+### Fixed
+
+- Include `Host` and `Accept` headers on proxy "CONNECT" requests.
+- De-duplicate any headers also contained in proxy_headers.
+- HTTP/2 flag not being passed down to proxy connections.
+
+## 0.8.2 (May 3rd, 2020)
+
+### Fixed
+
+- Fix connections using proxy forwarding requests not being added to the
+connection pool properly. (Pull #70)
+
+## 0.8.1 (April 30th, 2020)
+
+### Changed
+
+- Allow inheritance of both `httpcore.AsyncByteStream` and `httpcore.SyncByteStream` without type conflicts.
+
+## 0.8.0 (April 30th, 2020)
+
+### Fixed
+
+- Fixed tunnel proxy support.
+
+### Added
+
+- New `TimeoutException` base class.
+
+## 0.7.0 (March 5th, 2020)
+
+- First integration with HTTPX.
diff --git a/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/RECORD b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/RECORD
new file mode 100644
index 0000000..e1cb475
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/RECORD
@@ -0,0 +1,68 @@
+httpcore-1.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+httpcore-1.0.5.dist-info/METADATA,sha256=YgW4guGB2OBYCvLOVSIgUoXRbjO-aWDbHXnYIUaJzTU,20969
+httpcore-1.0.5.dist-info/RECORD,,
+httpcore-1.0.5.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87
+httpcore-1.0.5.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518
+httpcore/__init__.py,sha256=fBzHStfNOukxTWXQFx6W_fv0MAcKntslGJqhHliu45E,3337
+httpcore/__pycache__/__init__.cpython-312.pyc,,
+httpcore/__pycache__/_api.cpython-312.pyc,,
+httpcore/__pycache__/_exceptions.cpython-312.pyc,,
+httpcore/__pycache__/_models.cpython-312.pyc,,
+httpcore/__pycache__/_ssl.cpython-312.pyc,,
+httpcore/__pycache__/_synchronization.cpython-312.pyc,,
+httpcore/__pycache__/_trace.cpython-312.pyc,,
+httpcore/__pycache__/_utils.cpython-312.pyc,,
+httpcore/_api.py,sha256=IBR18qZQ8ETcghJXC1Gd-30WuKYRS0EyF2eC80_OBQ8,3167
+httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221
+httpcore/_async/__pycache__/__init__.cpython-312.pyc,,
+httpcore/_async/__pycache__/connection.cpython-312.pyc,,
+httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,,
+httpcore/_async/__pycache__/http11.cpython-312.pyc,,
+httpcore/_async/__pycache__/http2.cpython-312.pyc,,
+httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,,
+httpcore/_async/__pycache__/interfaces.cpython-312.pyc,,
+httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,,
+httpcore/_async/connection.py,sha256=63vgzLIgX3bjq-RsjK68UWS_DWNkdvnKP0vJRK3Prfs,8484
+httpcore/_async/connection_pool.py,sha256=vjyIrwkj3QfzndhXOBrlyplJidWnhrj0foAmIpSj5tA,15609
+httpcore/_async/http11.py,sha256=yvohHUXwdJv9gN-dEJv4C5F8_NiyOtcflua1Q3BRjew,13978
+httpcore/_async/http2.py,sha256=_AgUDRcjAIlbncbOjW0I-iqYN1PDgRcFUIGfzZ2fKcI,23881
+httpcore/_async/http_proxy.py,sha256=hl4t-PahlAuCGtKNYRx4LSgjx1ZuspE9oDBaL6BOess,14851
+httpcore/_async/interfaces.py,sha256=J2iq9rs7x3nKS6iCfntjHY0Woast6V_HuXuE8rs3HmA,4486
+httpcore/_async/socks_proxy.py,sha256=T8y927RATyy4A9GMduRVUh13ZeRq8Ts8JP24bFVQ6n8,13934
+httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+httpcore/_backends/__pycache__/__init__.cpython-312.pyc,,
+httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, +httpcore/_backends/__pycache__/auto.cpython-312.pyc,, +httpcore/_backends/__pycache__/base.cpython-312.pyc,, +httpcore/_backends/__pycache__/mock.cpython-312.pyc,, +httpcore/_backends/__pycache__/sync.cpython-312.pyc,, +httpcore/_backends/__pycache__/trio.cpython-312.pyc,, +httpcore/_backends/anyio.py,sha256=3SbX3SMTwT4SYmoTWbzWazwZg0mfQ3jDR8lI_n9JKr8,5295 +httpcore/_backends/auto.py,sha256=Q_iQjNuwJseqBxeYJYtiaGzFs08_LGI3K_egYrixEqE,1683 +httpcore/_backends/base.py,sha256=Qsb8b_PSiVP1ldHHGXHxQzJ1Qlzj2r8KR9KQeANkSbE,3218 +httpcore/_backends/mock.py,sha256=S4IADhC6kE22ge_jR_WHlEUkD6QAsXnwz26DSWZLcG4,4179 +httpcore/_backends/sync.py,sha256=LAomvc-MAlot5-S9CCFxnr561aDp9yhyfs_65WeCkZ4,8086 +httpcore/_backends/trio.py,sha256=INOeHEkA8pO6AsSqjColWcayM0FQSyGi1hpaQghjrCs,6078 +httpcore/_exceptions.py,sha256=7zb3KNiG0qmfUNIdFgdaUSbn2Pu3oztghi6Vg7i-LJU,1185 +httpcore/_models.py,sha256=7DlYrkyc2z-orQrnztCUmtBY4gLMz18FjPP9e5Q-fFg,16614 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_sync/__pycache__/http11.cpython-312.pyc,, +httpcore/_sync/__pycache__/http2.cpython-312.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_sync/connection.py,sha256=n7YFLjYsRv4cf0CXEIqNsUqR_NPNvFQN8dGqjj0mv9U,8273 +httpcore/_sync/connection_pool.py,sha256=lglrm_FWU9J_fwCNFqgcaWCebgZw49V7KhcS0wBZ-Ok,15277 +httpcore/_sync/http11.py,sha256=9-IgEawTTbkHuOE8O3LODhp3KCJ4tAo5vmyA4UE66pU,13564 +httpcore/_sync/http2.py,sha256=_fPbMtCAVqGXKFYo3OmNNkucDuVTF69vMEbSFE2Jodo,23345 +httpcore/_sync/http_proxy.py,sha256=82oin8vjt2a7YmmVvz7sXEZSBuajK-mHDF-EwnR_pJ0,14613 +httpcore/_sync/interfaces.py,sha256=EM4PTf-rgkclzisFcrTyx1G8FwraoffE8rbckOznX_o,4365 +httpcore/_sync/socks_proxy.py,sha256=T13QSceeEAg1PM9Yh7Nk-DoqI28TIUqDS-9O3OSC9Uc,13707 +httpcore/_synchronization.py,sha256=HjyPscK40YPQ-_nTcoBXd3S6IqbHTNuw7lTaqtgA3s4,9464 +httpcore/_trace.py,sha256=akf5PsWVq3rZjqmXniomU59OY37K7JHoeNDCQ4GU84E,3954 +httpcore/_utils.py,sha256=9QPh5ib4JilWX4dBCC_XO6wdBY4b0kbUGgfV3QfBANc,1525 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/WHEEL new file mode 100644 index 0000000..0309176 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.22.4 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..311b2b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore-1.0.5.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/lib/python3.12/site-packages/httpcore/__init__.py b/myenv/lib/python3.12/site-packages/httpcore/__init__.py new file mode 100644 index 0000000..014213b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/__init__.py @@ -0,0 +1,139 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
+ raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "1.0.5" + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6eb57ac Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000..44720ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000..6cd40c0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000..4ce72ee Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc new file mode 100644 index 0000000..649fa0a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 0000000..a39ca40 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc
new file mode 100644
index 0000000..8a86a8c
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc
new file mode 100644
index 0000000..a844e62
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/httpcore/_api.py b/myenv/lib/python3.12/site-packages/httpcore/_api.py
new file mode 100644
index 0000000..854235f
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore/_api.py
@@ -0,0 +1,92 @@
+from contextlib import contextmanager
+from typing import Iterator, Optional, Union
+
+from ._models import URL, Extensions, HeaderTypes, Response
+from ._sync.connection_pool import ConnectionPool
+
+
+def request(
+    method: Union[bytes, str],
+    url: Union[URL, bytes, str],
+    *,
+    headers: HeaderTypes = None,
+    content: Union[bytes, Iterator[bytes], None] = None,
+    extensions: Optional[Extensions] = None,
+) -> Response:
+    """
+    Sends an HTTP request, returning the response.
+
+    ```
+    response = httpcore.request("GET", "https://www.example.com/")
+    ```
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
+    """
+    with ConnectionPool() as pool:
+        return pool.request(
+            method=method,
+            url=url,
+            headers=headers,
+            content=content,
+            extensions=extensions,
+        )
+
+
+@contextmanager
+def stream(
+    method: Union[bytes, str],
+    url: Union[URL, bytes, str],
+    *,
+    headers: HeaderTypes = None,
+    content: Union[bytes, Iterator[bytes], None] = None,
+    extensions: Optional[Extensions] = None,
+) -> Iterator[Response]:
+    """
+    Sends an HTTP request, returning the response within a context manager.
+
+    ```
+    with httpcore.stream("GET", "https://www.example.com/") as response:
+        ...
+    ```
+
+    When using the `stream()` function, the body of the response will not be
+    automatically read. If you want to access the response body you should
+    either use `content = response.read()`, or `for chunk in response.iter_content()`.
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
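+
+    For example, reading a large response incrementally (a sketch based on
+    the `iter_content()` interface described above):
+
+    ```
+    with httpcore.stream("GET", "https://www.example.com/") as response:
+        for chunk in response.iter_content():
+            print(chunk)
+    ```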
+ """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/myenv/lib/python3.12/site-packages/httpcore/_async/__init__.py new file mode 100644 index 0000000..88dc7f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d696296 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000..1898e07 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000..3ee8649 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000..621bf63 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000..2ac3743 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 0000000..27d3801 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000..bfc5fdd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000..16ecaf5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/connection.py b/myenv/lib/python3.12/site-packages/httpcore/_async/connection.py new file mode 100644 index 0000000..2f439cf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/connection.py @@ -0,0 +1,220 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: Optional[AsyncConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or 
self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
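+    # For example (a sketch; `origin` is assumed to be an `httpcore.Origin`
+    # such as `httpcore.Origin(b"https", b"example.org", 443)`):
+    #
+    #     async with AsyncHTTPConnection(origin=origin) as connection:
+    #         response = await connection.request("GET", "https://example.org/")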
+ + async def __aenter__(self) -> "AsyncHTTPConnection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/myenv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000..018b0ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,380 @@ +import ssl +import sys +from types import TracebackType +from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: Optional[AsyncConnectionInterface] = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection( + self, connection: Optional[AsyncConnectionInterface] + ) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: Optional[float] = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. 
+ local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: List[AsyncConnectionInterface] = [] + self._requests: List[AsyncPoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = AsyncThreadLock() + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + , + , + , + ] + ``` + """ + return list(self._connections) + + async def handle_async_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = AsyncPoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + await self._close_connections(closing) + + # Wait until this request has an assigned connection. 
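+                # (If the pool is already at `max_connections` with no idle
+                # connection to evict, this is the point where the request
+                # blocks, bounded by the "pool" timeout extension read above.)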
+ connection = await pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. + response = await connection.handle_async_request( + pool_request.request + ) + except ConnectionNotAvailable: + # In some cases a connection may initially be available to + # handle a request, but then become unavailable. + # + # In this case we clear the connection and try again. + pool_request.clear_connection() + else: + break # pragma: nocover + + except BaseException as exc: + with self._optional_thread_lock: + # For any exception or cancellation we remove the request from + # the queue, and then re-assign requests to connections. + self._requests.remove(pool_request) + closing = self._assign_requests_to_connections() + + await self._close_connections(closing) + raise exc from None + + # Return the response. Note that in this case we still have to manage + # the point at which the response is closed. + assert isinstance(response.stream, AsyncIterable) + return Response( + status=response.status, + headers=response.headers, + content=PoolByteStream( + stream=response.stream, pool_request=pool_request, pool=self + ), + extensions=response.extensions, + ) + + def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]: + """ + Manage the state of the connection pool, assigning incoming + requests to connections as available. + + Called whenever a new request is added or removed from the pool. + + Any closing connections are returned, allowing the I/O for closing + those connections to be handled seperately. + """ + closing_connections = [] + + # First we handle cleaning up any connections that are closed, + # have expired their keep-alive, or surplus idle connections. + for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + avilable_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. 
+ if avilable_connections: + # log: "reusing existing connection" + connection = avilable_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. + with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> "AsyncConnectionPool": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await self._pool._close_connections(closing) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/http11.py b/myenv/lib/python3.12/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000..0493a92 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/http11.py @@ -0,0 +1,386 @@ +import enum +import logging +import ssl +import time +from types import TracebackType +from typing import ( + Any, + 
AsyncIterable, + AsyncIterator, + List, + Optional, + Tuple, + Type, + Union, +) + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: Optional[float] = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._expire_at: Optional[float] = None + self._state = HTTPConnectionState.NEW + self._state_lock = AsyncLock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, + ) + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): + self._request_count += 1 + self._state = HTTPConnectionState.ACTIVE + self._expire_at = None + else: + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request} + try: + async with Trace( + "send_request_headers", logger, request, kwargs + ) as trace: + await self._send_request_headers(**kwargs) + async with Trace("send_request_body", logger, request, kwargs) as trace: + await self._send_request_body(**kwargs) + except WriteError: + # If we get a write error while we're writing the request, + # then we supress this error and move on to attempting to + # read the response. Servers can sometimes close the request + # pre-emptively and then respond with a well formed HTTP + # error response. + pass + + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + ( + http_version, + status, + reason_phrase, + headers, + trailing_data, + ) = await self._receive_response_headers(**kwargs) + trace.return_value = ( + http_version, + status, + reason_phrase, + headers, + ) + + network_stream = self._network_stream + + # CONNECT or Upgrade request + if (status == 101) or ( + (request.method == b"CONNECT") and (200 <= status < 300) + ): + network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data) + + return Response( + status=status, + headers=headers, + content=HTTP11ConnectionByteStream(self, request), + extensions={ + "http_version": http_version, + "reason_phrase": reason_phrase, + "network_stream": network_stream, + }, + ) + except BaseException as exc: + with AsyncShieldCancellation(): + async with Trace("response_closed", logger, request) as trace: + await self._response_closed() + raise exc + + # Sending the request... 
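+    # The flow below drives the h11 state machine: send an h11.Request event,
+    # then h11.Data events for the body, then h11.EndOfMessage; each event is
+    # serialized to bytes in `_send_event` and written to the network stream.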
+ + async def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + await self._send_event(event, timeout=timeout) + + async def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, AsyncIterable) + async for chunk in request.stream: + event = h11.Data(data=chunk) + await self._send_event(event, timeout=timeout) + + await self._send_event(h11.EndOfMessage(), timeout=timeout) + + async def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + await self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + async def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = await self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + async def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." 
+ raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + # mypy fails to narrow the type in the if statement above + return event # type: ignore[return-value] + + async def _response_closed(self) -> None: + async with self._state_lock: + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._state = HTTPConnectionState.IDLE + self._h11_state.start_next_cycle() + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + else: + await self.aclose() + + # Once the connection is no longer required... + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # The AsyncConnectionInterface methods provide information about the state of + # the connection, allowing for a connection pooling implementation to + # determine when to reuse and when to close the connection... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + # Note that HTTP/1.1 connections in the "NEW" state are not treated as + # being "available". The control flow which created the connection will + # be able to send an outgoing request, but the connection will not be + # acquired from the connection pool for any other request. + return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly.
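
A hedged sketch of that direct usage, opening the stream ourselves instead of going through a pool (host and URL are illustrative, and `AnyIOBackend` lives in a private module, so treat this as a sketch rather than a supported recipe):

```python
import anyio
import httpcore
from httpcore._backends.anyio import AnyIOBackend

async def main() -> None:
    # Open the raw network stream ourselves; the connection pool
    # normally does this on our behalf.
    origin = httpcore.Origin(scheme=b"http", host=b"example.com", port=80)
    stream = await AnyIOBackend().connect_tcp("example.com", 80, timeout=10)

    # The __aenter__/__aexit__ methods defined next make this work;
    # aclose() runs automatically on exit.
    async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn:
        response = await conn.request("GET", "http://example.com/")
        print(response.status, conn.info())

anyio.run(main)
```
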
+ + async def __aenter__(self) -> "AsyncHTTP11Connection": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> Any: + return self._stream.get_extra_info(info) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/http2.py b/myenv/lib/python3.12/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000..c201ee4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + 
keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: typing.Optional[float] = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: typing.Dict[ + int, + typing.Union[ + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( + None + ) + + self._read_exception: typing.Optional[Exception] = None + self._write_exception: typing.Optional[Exception] = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + kwargs = {"request": request} + async with Trace("send_connection_init", logger, request, kwargs): + await self._send_connection_init(**kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return 
Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. 
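
As a quick illustration, the mapping performed by the code that follows turns an HTTP/1.1 header list into HTTP/2 pseudo-headers roughly like this (values are illustrative, not httpcore code):

```python
request_headers = [(b"Host", b"example.com"), (b"Accept", b"*/*")]

# The ':authority' pseudo-header takes its value from the 'Host' header.
authority = [v for k, v in request_headers if k.lower() == b"host"][0]

headers = [
    (b":method", b"GET"),
    (b":authority", authority),
    (b":scheme", b"https"),
    (b":path", b"/index.html"),
] + [
    # Host and HTTP/1.1 framing headers are dropped; the rest are lowercased.
    (k.lower(), v)
    for k, v in request_headers
    if k.lower() not in (b"host", b"transfer-encoding")
]

assert headers[1] == (b":authority", b"example.com")
assert (b"accept", b"*/*") in headers
```
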
+ authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. + """ + while data: + max_flow = await self._wait_for_outgoing_flow(request, stream_id) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self._h2_state.send_data(stream_id, chunk) + await self._write_outgoing_data(request) + + async def _send_end_stream(self, request: Request, stream_id: int) -> None: + """ + Send an empty data frame on a given stream ID with the END_STREAM flag set. + """ + self._h2_state.end_stream(stream_id) + await self._write_outgoing_data(request) + + # Receiving the response... + + async def _receive_response( + self, request: Request, stream_id: int + ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.AsyncIterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = await self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + await self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + async def _receive_stream_event( + self, request: Request, stream_id: int + ) -> typing.Union[ + h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded + ]: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + await self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + async def _receive_events( + self, request: Request, stream_id: typing.Optional[int] = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID.
+ """ + async with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. + if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. 
Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
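
The flow-control arithmetic in `_wait_for_outgoing_flow` above reduces to taking the minimum of the stream's window and the maximum frame size. A small sketch with illustrative numbers (65,535-byte window and 16,384-byte frames are the RFC 7540 initial values; this is not httpcore code):

```python
def next_chunk_size(data_len: int, window: int, max_frame_size: int) -> int:
    flow = min(window, max_frame_size)  # bytes permitted on the wire right now
    return min(data_len, flow)          # 0 means: wait for a WINDOW_UPDATE

assert next_chunk_size(100_000, 65_535, 16_384) == 16_384  # frame-size bound
assert next_chunk_size(100_000, 0, 16_384) == 0            # blocked on window
assert next_chunk_size(1_000, 65_535, 16_384) == 1_000     # data bound
```
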
+ + async def __aenter__(self) -> "AsyncHTTP2Connection": + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/myenv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000..4aa7d87 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,368 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class AsyncHTTPProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. 
+ """ + + def __init__( + self, + proxy_url: Union[URL, bytes, str], + proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> 
bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[AsyncNetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + 
keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/myenv/lib/python3.12/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 0000000..c998dd2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @asynccontextmanager + async def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, AsyncIterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
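
The `request()` method above and the `stream()` method continued below differ only in when the body is consumed: the former reads and closes the response eagerly, while the latter hands the caller an open response and closes it afterwards. A hedged usage sketch of both against a pool (URL illustrative, not httpcore test code):

```python
import anyio
import httpcore

async def main() -> None:
    async with httpcore.AsyncConnectionPool() as pool:
        # request(): the body is fully read before the call returns.
        response = await pool.request("GET", "https://www.example.com/")
        print(response.status, len(response.content))

        # stream(): the body is consumed chunk by chunk inside the context.
        async with pool.stream("GET", "https://www.example.com/") as response:
            async for chunk in response.aiter_stream():
                print(f"received {len(chunk)} bytes")

anyio.run(main)
```
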
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise a `NewConnectionRequired` + exception if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that the server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not.
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/myenv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/myenv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 0000000..f839603 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,342 @@ +import logging +import ssl +import typing + +from socksio import socks5 + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, +) -> None: + conn = socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+ ) + + if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socks5.SOCKS5CommandRequest.from_address( + socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socks5.SOCKS5Reply) + if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class AsyncSOCKSProxy(AsyncConnectionPool): + """ + A connection pool that sends requests via a SOCKS5 proxy. + """ + + def __init__( + self, + proxy_url: typing.Union[URL, bytes, str], + proxy_auth: typing.Optional[ + typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] + ] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + max_connections: typing.Optional[int] = 10, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[AsyncNetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: typing.Optional[AsyncConnectionInterface] = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if 
we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..941f3d2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc new file mode 100644 index 0000000..183eefc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc new file mode 100644 index 0000000..5bf111b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..1ce2236 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000..70e4d2f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc new file mode 100644 index 0000000..4bd71e5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc new file mode 100644 index 0000000..bfbf219 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000..5731f5e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,147 @@ +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + 
server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + if socket_options is None: + socket_options = [] # pragma: no cover + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/auto.py new file mode 100644 index 0000000..3ac05f4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,51 @@ +import typing +from typing import Optional + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + 
local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/base.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000..6cadedb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/base.py @@ -0,0 +1,103 @@ +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "AsyncNetworkStream": + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: 
typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/mock.py new file mode 100644 index 0000000..f7aefeb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,142 @@ +import ssl +import typing +from typing import Optional + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.MockStream>" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: + pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.AsyncMockStream>" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: 
self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: Optional[float] = None, + local_address: Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/sync.py new file mode 100644 index 0000000..7b7b417 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,239 @@ +import socket +import ssl +import sys +import typing +from functools import partial + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class TLSinTLSStream(NetworkStream): # pragma: no cover + """ + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. + """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> "NetworkStream": + raise NotImplementedError() + + def get_extra_info(self, info: str) -> 
typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. + return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. 
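+ # For illustration only (hypothetical values): a caller might pass + # socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)], that is, + # any (level, optname, value) tuples accepted by `socket.setsockopt`. + # They are applied in the loop below, before TCP_NODELAY is set.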
+ if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." + ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/myenv/lib/python3.12/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000..b1626d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,161 @@ +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read( + self, max_bytes: int, timeout: typing.Optional[float] = None + ) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write( + self, buffer: bytes, timeout: typing.Optional[float] = None + ) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: typing.Optional[str] = None, + timeout: typing.Optional[float] = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() 
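+ # The `https_compatible=True` flag passed to `trio.SSLStream` above relaxes + # trio's strict close_notify handling, matching how most real-world HTTPS + # servers actually behave.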
+ except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. + # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: typing.Optional[float] = None, + local_address: typing.Optional[str] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: typing.Optional[float] = None, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/myenv/lib/python3.12/site-packages/httpcore/_exceptions.py b/myenv/lib/python3.12/site-packages/httpcore/_exceptions.py new file mode 100644 index 0000000..81e7fc6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +from typing import Iterator, Mapping, Type + +ExceptionMapping = Mapping[Type[Exception], Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> Iterator[None]: + try: + yield + except 
Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + + +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/myenv/lib/python3.12/site-packages/httpcore/_models.py b/myenv/lib/python3.12/site-packages/httpcore/_models.py new file mode 100644 index 0000000..dadee79 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_models.py @@ -0,0 +1,492 @@ +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, +) +from urllib.parse import urlparse + +# Functions for typechecking... + + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] +HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = MutableMapping[str, Any] + + +def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range, chr(0)...chr(127). If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str +) -> List[Tuple[bytes, bytes]]: + """ + Convenience function that ensures all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." 
+ ) + + +def enforce_stream( + value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str +) -> Union[Iterable[bytes], AsyncIterable[bytes]]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: List[Tuple[bytes, bytes]], + *, + url: "URL", + content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], +) -> List[Tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content, which supports both sync and async + stream iteration. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convenience: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitly pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`... 
+ + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET http://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'http://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demark the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issued over the wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: Union[bytes, str] = "", + *, + scheme: Union[bytes, str] = b"", + host: Union[bytes, str] = b"", + port: Optional[int] = None, + target: Union[bytes, str] = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. + Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. + """ + if url: + parsed = urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes. 
+ For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com"`. + headers: The HTTP request headers. + content: The content of the request body. + extensions: A dictionary of optional extra information included on + the request. Possible keys include `"timeout"` and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + if "target" in self.extensions: + self.url = URL( + scheme=self.url.scheme, + host=self.url.host, + port=self.url.port, + target=self.extensions["target"], + ) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the response. Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. + """ + self.status: int = status + self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + content, name="content" + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." + ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> Iterator[bytes]: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once." + ) + self._stream_consumed = True + for chunk in self.stream: + yield chunk + + def close(self) -> None: + if not isinstance(self.stream, Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to close an asynchronous response using 'response.close()'. " + "You should use 'await response.aclose()' instead." + ) + if hasattr(self.stream, "close"): + self.stream.close() + + # Async interface... + + async def aread(self) -> bytes: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to read a synchronous response using " + "'await response.aread()'. " + "You should use 'response.read()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_stream()]) + return self._content + + async def aiter_stream(self) -> AsyncIterator[bytes]: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream a synchronous response using 'async for ... in " + "response.aiter_stream()'. " + "You should use 'for ... in response.iter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'async for ... in response.aiter_stream()' " + "more than once." + ) + self._stream_consumed = True + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + if not isinstance(self.stream, AsyncIterable): # pragma: nocover + raise RuntimeError( + "Attempted to close a synchronous response using " + "'await response.aclose()'. " + "You should use 'response.close()' instead." + ) + if hasattr(self.stream, "aclose"): + await self.stream.aclose() diff --git a/myenv/lib/python3.12/site-packages/httpcore/_ssl.py b/myenv/lib/python3.12/site-packages/httpcore/_ssl.py new file mode 100644 index 0000000..c99c5a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_ssl.py @@ -0,0 +1,9 @@ +import ssl + +import certifi + + +def default_ssl_context() -> ssl.SSLContext: + context = ssl.create_default_context() + context.load_verify_locations(certifi.where()) + return context diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 0000000..b476d76 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/__init__.py @@ -0,0 +1,39 @@ +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .http_proxy import HTTPProxy +from .interfaces import ConnectionInterface + +try: + from .http2 import HTTP2Connection +except ImportError: # pragma: nocover + + class HTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." 
+ ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..867a0ac Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000..4fa7e67 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000..501722f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000..b9524ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000..e580a74 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 0000000..0dbda88 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000..b5c8703 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000..cf55ffd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000..c3890f3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,220 @@ +import itertools +import logging +import ssl +from types import TracebackType +from typing import Iterable, Iterator, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import 
SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: Optional[ConnectionInterface] = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, 
request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
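+ # A minimal usage sketch (hypothetical origin and request values; `Origin` + # and `Request` are imported from `.._models` above): + # + # origin = Origin(b"https", b"example.com", 443) + # with HTTPConnection(origin=origin) as conn: + # response = conn.handle_request(Request("GET", "https://example.com/")) + #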
+ + def __enter__(self) -> "HTTPConnection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000..8dcf348 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,380 @@ +import ssl +import sys +from types import TracebackType +from typing import Iterable, Iterator, List, Optional, Type + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: Optional[ConnectionInterface] = None + self._connection_acquired = Event() + + def assign_to_connection( + self, connection: Optional[ConnectionInterface] + ) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: Optional[float] = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: Optional[ssl.SSLContext] = None, + max_connections: Optional[int] = 10, + max_keepalive_connections: Optional[int] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: Optional[str] = None, + uds: Optional[str] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection is established. + """ + self._ssl_context = ssl_context + + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: List[ConnectionInterface] = [] + self._requests: List[PoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = ThreadLock() + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return HTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> List[ConnectionInterface]: + """ + Return a list of the connections currently in the pool. + + For example: + + ```python + >>> pool.connections + [ + <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>, + <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>, + <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>, + ] + ``` + """ + return list(self._connections) + + def handle_request(self, request: Request) -> Response: + """ + Send an HTTP request, and return an HTTP response. + + This is the core implementation that is called into by `.request()` or `.stream()`. + """ + scheme = request.url.scheme.decode() + if scheme == "": + raise UnsupportedProtocol( + "Request URL is missing an 'http://' or 'https://' protocol." + ) + if scheme not in ("http", "https", "ws", "wss"): + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{scheme}://'." + ) + + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("pool", None) + + with self._optional_thread_lock: + # Add the incoming request to our request queue. + pool_request = PoolRequest(request) + self._requests.append(pool_request) + + try: + while True: + with self._optional_thread_lock: + # Assign incoming requests to available connections, + # closing or creating new connections as required. + closing = self._assign_requests_to_connections() + self._close_connections(closing) + + # Wait until this request has an assigned connection. + connection = pool_request.wait_for_connection(timeout=timeout) + + try: + # Send the request on the assigned connection. 
+ response = connection.handle_request(
+ pool_request.request
+ )
+ except ConnectionNotAvailable:
+ # In some cases a connection may initially be available to
+ # handle a request, but then become unavailable.
+ #
+ # In this case we clear the connection and try again.
+ pool_request.clear_connection()
+ else:
+ break # pragma: nocover
+
+ except BaseException as exc:
+ with self._optional_thread_lock:
+ # For any exception or cancellation we remove the request from
+ # the queue, and then re-assign requests to connections.
+ self._requests.remove(pool_request)
+ closing = self._assign_requests_to_connections()
+
+ self._close_connections(closing)
+ raise exc from None
+
+ # Return the response. Note that in this case we still have to manage
+ # the point at which the response is closed.
+ assert isinstance(response.stream, Iterable)
+ return Response(
+ status=response.status,
+ headers=response.headers,
+ content=PoolByteStream(
+ stream=response.stream, pool_request=pool_request, pool=self
+ ),
+ extensions=response.extensions,
+ )
+
+ def _assign_requests_to_connections(self) -> List[ConnectionInterface]:
+ """
+ Manage the state of the connection pool, assigning incoming
+ requests to connections as available.
+
+ Called whenever a request is added to or removed from the pool.
+
+ Any closing connections are returned, allowing the I/O for closing
+ those connections to be handled separately.
+ """
+ closing_connections = []
+
+ # First we handle cleaning up any connections that are closed,
+ # have expired their keep-alive, or surplus idle connections.
+ for connection in list(self._connections):
+ if connection.is_closed():
+ # log: "removing closed connection"
+ self._connections.remove(connection)
+ elif connection.has_expired():
+ # log: "closing expired connection"
+ self._connections.remove(connection)
+ closing_connections.append(connection)
+ elif (
+ connection.is_idle()
+ and len([connection.is_idle() for connection in self._connections])
+ > self._max_keepalive_connections
+ ):
+ # log: "closing idle connection"
+ self._connections.remove(connection)
+ closing_connections.append(connection)
+
+ # Assign queued requests to connections.
+ queued_requests = [request for request in self._requests if request.is_queued()]
+ for pool_request in queued_requests:
+ origin = pool_request.request.url.origin
+ available_connections = [
+ connection
+ for connection in self._connections
+ if connection.can_handle_request(origin) and connection.is_available()
+ ]
+ idle_connections = [
+ connection for connection in self._connections if connection.is_idle()
+ ]
+
+ # There are three cases for how we may be able to handle the request:
+ #
+ # 1. There is an existing connection that can handle the request.
+ # 2. We can create a new connection to handle the request.
+ # 3. We can close an idle connection and then create a new connection
+ # to handle the request.
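+ #
+ # If none of the three cases applies, the request is simply left on the
+ # queue; it will be picked up again on a later call to this method, once
+ # another request completes or a connection closes.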
+ if available_connections:
+ # log: "reusing existing connection"
+ connection = available_connections[0]
+ pool_request.assign_to_connection(connection)
+ elif len(self._connections) < self._max_connections:
+ # log: "creating new connection"
+ connection = self.create_connection(origin)
+ self._connections.append(connection)
+ pool_request.assign_to_connection(connection)
+ elif idle_connections:
+ # log: "closing idle connection"
+ connection = idle_connections[0]
+ self._connections.remove(connection)
+ closing_connections.append(connection)
+ # log: "creating new connection"
+ connection = self.create_connection(origin)
+ self._connections.append(connection)
+ pool_request.assign_to_connection(connection)
+
+ return closing_connections
+
+ def _close_connections(self, closing: List[ConnectionInterface]) -> None:
+ # Close connections which have been removed from the pool.
+ with ShieldCancellation():
+ for connection in closing:
+ connection.close()
+
+ def close(self) -> None:
+ # Explicitly close the connection pool.
+ # Clears all existing requests and connections.
+ with self._optional_thread_lock:
+ closing_connections = list(self._connections)
+ self._connections = []
+ self._close_connections(closing_connections)
+
+ def __enter__(self) -> "ConnectionPool":
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ traceback: Optional[TracebackType] = None,
+ ) -> None:
+ self.close()
+
+ def __repr__(self) -> str:
+ class_name = self.__class__.__name__
+ with self._optional_thread_lock:
+ request_is_queued = [request.is_queued() for request in self._requests]
+ connection_is_idle = [
+ connection.is_idle() for connection in self._connections
+ ]
+
+ num_active_requests = request_is_queued.count(False)
+ num_queued_requests = request_is_queued.count(True)
+ num_active_connections = connection_is_idle.count(False)
+ num_idle_connections = connection_is_idle.count(True)
+
+ requests_info = (
+ f"Requests: {num_active_requests} active, {num_queued_requests} queued"
+ )
+ connection_info = (
+ f"Connections: {num_active_connections} active, {num_idle_connections} idle"
+ )
+
+ return f"<{class_name} [{requests_info} | {connection_info}]>"
+
+
+class PoolByteStream:
+ def __init__(
+ self,
+ stream: Iterable[bytes],
+ pool_request: PoolRequest,
+ pool: ConnectionPool,
+ ) -> None:
+ self._stream = stream
+ self._pool_request = pool_request
+ self._pool = pool
+ self._closed = False
+
+ def __iter__(self) -> Iterator[bytes]:
+ try:
+ for part in self._stream:
+ yield part
+ except BaseException as exc:
+ self.close()
+ raise exc from None
+
+ def close(self) -> None:
+ if not self._closed:
+ self._closed = True
+ with ShieldCancellation():
+ if hasattr(self._stream, "close"):
+ self._stream.close()
+
+ with self._pool._optional_thread_lock:
+ self._pool._requests.remove(self._pool_request)
+ closing = self._pool._assign_requests_to_connections()
+
+ self._pool._close_connections(closing)
diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/http11.py
new file mode 100644
index 0000000..a74ff8e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/http11.py
@@ -0,0 +1,386 @@
+import enum
+import logging
+import ssl
+import time
+from types import TracebackType
+from typing import (
+ Any,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+)
+
+import h11
+
+from .._backends.base import NetworkStream
+from .._exceptions import (
+ ConnectionNotAvailable,
+ LocalProtocolError,
+ RemoteProtocolError,
+ WriteError,
+ map_exceptions,
+)
+from .._models import Origin, Request, Response
+from .._synchronization import Lock, ShieldCancellation
+from .._trace import Trace
+from .interfaces import ConnectionInterface
+
+logger = logging.getLogger("httpcore.http11")
+
+
+# A subset of `h11.Event` types supported by `_send_event`
+H11SendEvent = Union[
+ h11.Request,
+ h11.Data,
+ h11.EndOfMessage,
+]
+
+
+class HTTPConnectionState(enum.IntEnum):
+ NEW = 0
+ ACTIVE = 1
+ IDLE = 2
+ CLOSED = 3
+
+
+class HTTP11Connection(ConnectionInterface):
+ READ_NUM_BYTES = 64 * 1024
+ MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
+
+ def __init__(
+ self,
+ origin: Origin,
+ stream: NetworkStream,
+ keepalive_expiry: Optional[float] = None,
+ ) -> None:
+ self._origin = origin
+ self._network_stream = stream
+ self._keepalive_expiry: Optional[float] = keepalive_expiry
+ self._expire_at: Optional[float] = None
+ self._state = HTTPConnectionState.NEW
+ self._state_lock = Lock()
+ self._request_count = 0
+ self._h11_state = h11.Connection(
+ our_role=h11.CLIENT,
+ max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+ )
+
+ def handle_request(self, request: Request) -> Response:
+ if not self.can_handle_request(request.url.origin):
+ raise RuntimeError(
+ f"Attempted to send request to {request.url.origin} on connection "
+ f"to {self._origin}"
+ )
+
+ with self._state_lock:
+ if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+ self._request_count += 1
+ self._state = HTTPConnectionState.ACTIVE
+ self._expire_at = None
+ else:
+ raise ConnectionNotAvailable()
+
+ try:
+ kwargs = {"request": request}
+ try:
+ with Trace(
+ "send_request_headers", logger, request, kwargs
+ ) as trace:
+ self._send_request_headers(**kwargs)
+ with Trace("send_request_body", logger, request, kwargs) as trace:
+ self._send_request_body(**kwargs)
+ except WriteError:
+ # If we get a write error while we're writing the request,
+ # then we suppress this error and move on to attempting to
+ # read the response. Servers can sometimes close the request
+ # pre-emptively and then respond with a well-formed HTTP
+ # error response.
+ pass
+
+ with Trace(
+ "receive_response_headers", logger, request, kwargs
+ ) as trace:
+ (
+ http_version,
+ status,
+ reason_phrase,
+ headers,
+ trailing_data,
+ ) = self._receive_response_headers(**kwargs)
+ trace.return_value = (
+ http_version,
+ status,
+ reason_phrase,
+ headers,
+ )
+
+ network_stream = self._network_stream
+
+ # CONNECT or Upgrade request
+ if (status == 101) or (
+ (request.method == b"CONNECT") and (200 <= status < 300)
+ ):
+ network_stream = HTTP11UpgradeStream(network_stream, trailing_data)
+
+ return Response(
+ status=status,
+ headers=headers,
+ content=HTTP11ConnectionByteStream(self, request),
+ extensions={
+ "http_version": http_version,
+ "reason_phrase": reason_phrase,
+ "network_stream": network_stream,
+ },
+ )
+ except BaseException as exc:
+ with ShieldCancellation():
+ with Trace("response_closed", logger, request) as trace:
+ self._response_closed()
+ raise exc
+
+ # Sending the request...
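+ #
+ # The two methods below drive the h11 state machine. As a rough sketch,
+ # a request maps onto the following sequence of h11 events (illustrative
+ # values, not taken from a real request):
+ #
+ # h11.Request(method="GET", target="/", headers=[(b"host", b"example.com")])
+ # h11.Data(data=b"...") # zero or more, for requests with a body
+ # h11.EndOfMessage() # always sent, terminating the request
+ #
+ # Each event is serialized to bytes by `h11.Connection.send()` and then
+ # written out to the network stream.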
+ + def _send_request_headers(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request( + method=request.method, + target=request.url.target, + headers=request.headers, + ) + self._send_event(event, timeout=timeout) + + def _send_request_body(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + assert isinstance(request.stream, Iterable) + for chunk in request.stream: + event = h11.Data(data=chunk) + self._send_event(event, timeout=timeout) + + self._send_event(h11.EndOfMessage(), timeout=timeout) + + def _send_event( + self, event: h11.Event, timeout: Optional[float] = None + ) -> None: + bytes_to_send = self._h11_state.send(event) + if bytes_to_send is not None: + self._network_stream.write(bytes_to_send, timeout=timeout) + + # Receiving the response... + + def _receive_response_headers( + self, request: Request + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Response): + break + if ( + isinstance(event, h11.InformationalResponse) + and event.status_code == 101 + ): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + trailing_data, _ = self._h11_state.trailing_data + + return http_version, event.status_code, event.reason, headers, trailing_data + + def _receive_response_body(self, request: Request) -> Iterator[bytes]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + while True: + event = self._receive_event(timeout=timeout) + if isinstance(event, h11.Data): + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + break + + def _receive_event( + self, timeout: Optional[float] = None + ) -> Union[h11.Event, Type[h11.PAUSED]]: + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self._network_stream.read( + self.READ_NUM_BYTES, timeout=timeout + ) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle this case distinctly and treat + # it as a ConnectError. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." 
+ raise RemoteProtocolError(msg)
+
+ self._h11_state.receive_data(data)
+ else:
+ # mypy fails to narrow the type in the if statement above
+ return event # type: ignore[return-value]
+
+ def _response_closed(self) -> None:
+ with self._state_lock:
+ if (
+ self._h11_state.our_state is h11.DONE
+ and self._h11_state.their_state is h11.DONE
+ ):
+ self._state = HTTPConnectionState.IDLE
+ self._h11_state.start_next_cycle()
+ if self._keepalive_expiry is not None:
+ now = time.monotonic()
+ self._expire_at = now + self._keepalive_expiry
+ else:
+ self.close()
+
+ # Once the connection is no longer required...
+
+ def close(self) -> None:
+ # Note that this method unilaterally closes the connection, and does
+ # not have any kind of locking in place around it.
+ self._state = HTTPConnectionState.CLOSED
+ self._network_stream.close()
+
+ # The ConnectionInterface methods provide information about the state of
+ # the connection, allowing for a connection pooling implementation to
+ # determine when to reuse and when to close the connection...
+
+ def can_handle_request(self, origin: Origin) -> bool:
+ return origin == self._origin
+
+ def is_available(self) -> bool:
+ # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+ # being "available". The control flow which created the connection will
+ # be able to send an outgoing request, but the connection will not be
+ # acquired from the connection pool for any other request.
+ return self._state == HTTPConnectionState.IDLE
+
+ def has_expired(self) -> bool:
+ now = time.monotonic()
+ keepalive_expired = self._expire_at is not None and now > self._expire_at
+
+ # If the HTTP connection is idle but the socket is readable, then the
+ # only valid state is that the socket is about to return b"", indicating
+ # a server-initiated disconnect.
+ server_disconnected = (
+ self._state == HTTPConnectionState.IDLE
+ and self._network_stream.get_extra_info("is_readable")
+ )
+
+ return keepalive_expired or server_disconnected
+
+ def is_idle(self) -> bool:
+ return self._state == HTTPConnectionState.IDLE
+
+ def is_closed(self) -> bool:
+ return self._state == HTTPConnectionState.CLOSED
+
+ def info(self) -> str:
+ origin = str(self._origin)
+ return (
+ f"{origin!r}, HTTP/1.1, {self._state.name}, "
+ f"Request Count: {self._request_count}"
+ )
+
+ def __repr__(self) -> str:
+ class_name = self.__class__.__name__
+ origin = str(self._origin)
+ return (
+ f"<{class_name} [{origin!r}, {self._state.name}, "
+ f"Request Count: {self._request_count}]>"
+ )
+
+ # These context managers are not used in the standard flow, but are
+ # useful for testing or working with connection instances directly.
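+ #
+ # A minimal sketch of direct usage, assuming a `network_backend` such as
+ # the package's `SyncBackend` (illustrative, not part of the library):
+ #
+ # origin = Origin(b"http", b"example.com", 80)
+ # stream = network_backend.connect_tcp("example.com", 80)
+ # with HTTP11Connection(origin=origin, stream=stream) as connection:
+ # response = connection.request("GET", "http://example.com/")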
+ + def __enter__(self) -> "HTTP11Connection": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: Optional[str] = None, + timeout: Optional[float] = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> Any: + return self._stream.get_extra_info(info) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000..1ee4bbb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,589 @@ +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: typing.Optional[float] = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._h2_state = 
h2.connection.H2Connection(config=self.CONFIG)
+ self._state = HTTPConnectionState.IDLE
+ self._expire_at: typing.Optional[float] = None
+ self._request_count = 0
+ self._init_lock = Lock()
+ self._state_lock = Lock()
+ self._read_lock = Lock()
+ self._write_lock = Lock()
+ self._sent_connection_init = False
+ self._used_all_stream_ids = False
+ self._connection_error = False
+
+ # Mapping from stream ID to the list of response stream events.
+ self._events: typing.Dict[
+ int,
+ typing.List[
+ typing.Union[
+ h2.events.ResponseReceived,
+ h2.events.DataReceived,
+ h2.events.StreamEnded,
+ h2.events.StreamReset,
+ ]
+ ],
+ ] = {}
+
+ # Connection terminated events are stored as state since
+ # we need to handle them for all streams.
+ self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
+ None
+ )
+
+ self._read_exception: typing.Optional[Exception] = None
+ self._write_exception: typing.Optional[Exception] = None
+
+ def handle_request(self, request: Request) -> Response:
+ if not self.can_handle_request(request.url.origin):
+ # This cannot occur in normal operation, since the connection pool
+ # will only send requests on connections that handle them.
+ # It's in place simply for resilience as a guard against incorrect
+ # usage, for anyone working directly with httpcore connections.
+ raise RuntimeError(
+ f"Attempted to send request to {request.url.origin} on connection "
+ f"to {self._origin}"
+ )
+
+ with self._state_lock:
+ if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE):
+ self._request_count += 1
+ self._expire_at = None
+ self._state = HTTPConnectionState.ACTIVE
+ else:
+ raise ConnectionNotAvailable()
+
+ with self._init_lock:
+ if not self._sent_connection_init:
+ try:
+ kwargs = {"request": request}
+ with Trace("send_connection_init", logger, request, kwargs):
+ self._send_connection_init(**kwargs)
+ except BaseException as exc:
+ with ShieldCancellation():
+ self.close()
+ raise exc
+
+ self._sent_connection_init = True
+
+ # Initially start with just 1 until the remote server provides
+ # its max_concurrent_streams value
+ self._max_streams = 1
+
+ local_settings_max_streams = (
+ self._h2_state.local_settings.max_concurrent_streams
+ )
+ self._max_streams_semaphore = Semaphore(local_settings_max_streams)
+
+ for _ in range(local_settings_max_streams - self._max_streams):
+ self._max_streams_semaphore.acquire()
+
+ self._max_streams_semaphore.acquire()
+
+ try:
+ stream_id = self._h2_state.get_next_available_stream_id()
+ self._events[stream_id] = []
+ except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover
+ self._used_all_stream_ids = True
+ self._request_count -= 1
+ raise ConnectionNotAvailable()
+
+ try:
+ kwargs = {"request": request, "stream_id": stream_id}
+ with Trace("send_request_headers", logger, request, kwargs):
+ self._send_request_headers(request=request, stream_id=stream_id)
+ with Trace("send_request_body", logger, request, kwargs):
+ self._send_request_body(request=request, stream_id=stream_id)
+ with Trace(
+ "receive_response_headers", logger, request, kwargs
+ ) as trace:
+ status, headers = self._receive_response(
+ request=request, stream_id=stream_id
+ )
+ trace.return_value = (status, headers)
+
+ return Response(
+ status=status,
+ headers=headers,
+ content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id),
+ extensions={
+ "http_version": b"HTTP/2",
+ "network_stream": self._network_stream,
+ "stream_id": stream_id,
+ },
+ )
+ except BaseException as exc: # noqa: PIE786
+ with ShieldCancellation():
kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + self._write_outgoing_data(request) + + # Sending the request... + + def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + self._write_outgoing_data(request) + + def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. 
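+
+ Each chunk is passed to `_send_stream_data`, which splits it into one or
+ more data frames sized to the available flow-control window.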
+ """
+ if not has_body_headers(request):
+ return
+
+ assert isinstance(request.stream, typing.Iterable)
+ for data in request.stream:
+ self._send_stream_data(request, stream_id, data)
+ self._send_end_stream(request, stream_id)
+
+ def _send_stream_data(
+ self, request: Request, stream_id: int, data: bytes
+ ) -> None:
+ """
+ Send a single chunk of data in one or more data frames.
+ """
+ while data:
+ max_flow = self._wait_for_outgoing_flow(request, stream_id)
+ chunk_size = min(len(data), max_flow)
+ chunk, data = data[:chunk_size], data[chunk_size:]
+ self._h2_state.send_data(stream_id, chunk)
+ self._write_outgoing_data(request)
+
+ def _send_end_stream(self, request: Request, stream_id: int) -> None:
+ """
+ Send an empty data frame on a given stream ID with the END_STREAM flag set.
+ """
+ self._h2_state.end_stream(stream_id)
+ self._write_outgoing_data(request)
+
+ # Receiving the response...
+
+ def _receive_response(
+ self, request: Request, stream_id: int
+ ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+ """
+ Return the response status code and headers for a given stream ID.
+ """
+ while True:
+ event = self._receive_stream_event(request, stream_id)
+ if isinstance(event, h2.events.ResponseReceived):
+ break
+
+ status_code = 200
+ headers = []
+ for k, v in event.headers:
+ if k == b":status":
+ status_code = int(v.decode("ascii", errors="ignore"))
+ elif not k.startswith(b":"):
+ headers.append((k, v))
+
+ return (status_code, headers)
+
+ def _receive_response_body(
+ self, request: Request, stream_id: int
+ ) -> typing.Iterator[bytes]:
+ """
+ Iterator that returns the bytes of the response body for a given stream ID.
+ """
+ while True:
+ event = self._receive_stream_event(request, stream_id)
+ if isinstance(event, h2.events.DataReceived):
+ amount = event.flow_controlled_length
+ self._h2_state.acknowledge_received_data(amount, stream_id)
+ self._write_outgoing_data(request)
+ yield event.data
+ elif isinstance(event, h2.events.StreamEnded):
+ break
+
+ def _receive_stream_event(
+ self, request: Request, stream_id: int
+ ) -> typing.Union[
+ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
+ ]:
+ """
+ Return the next available event for a given stream ID.
+
+ Will read more data from the network if required.
+ """
+ while not self._events.get(stream_id):
+ self._receive_events(request, stream_id)
+ event = self._events[stream_id].pop(0)
+ if isinstance(event, h2.events.StreamReset):
+ raise RemoteProtocolError(event)
+ return event
+
+ def _receive_events(
+ self, request: Request, stream_id: typing.Optional[int] = None
+ ) -> None:
+ """
+ Read some data from the network until we see one or more events
+ for a given stream ID.
+ """
+ with self._read_lock:
+ if self._connection_terminated is not None:
+ last_stream_id = self._connection_terminated.last_stream_id
+ if stream_id and last_stream_id and stream_id > last_stream_id:
+ self._request_count -= 1
+ raise ConnectionNotAvailable()
+ raise RemoteProtocolError(self._connection_terminated)
+
+ # This conditional is a bit icky. We don't want to block reading if we've
+ # actually got an event to return for a given stream. We need to do that
+ # check *within* the atomic read lock. Though it also needs to be optional,
+ # because when we call it from `_wait_for_outgoing_flow` we *do* want to
+ # block until we have available flow control, even when we have events
+ # pending for the stream ID we're attempting to send on.
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change(self, event: h2.events.Event) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data( + self, request: Request + ) -> typing.List[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
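+ #
+ # The write path in `_write_outgoing_data` below applies the same policy.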
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
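+ #
+ # Unlike HTTP/1.1, a single HTTP/2 connection can service several requests
+ # concurrently, one stream per request. A minimal sketch of direct usage,
+ # assuming an already-established `stream` that negotiated "h2" via ALPN
+ # (illustrative, not part of the library):
+ #
+ # with HTTP2Connection(origin=origin, stream=stream) as connection:
+ # response = connection.request("GET", "https://example.com/")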
+ + def __enter__(self) -> "HTTP2Connection": + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[types.TracebackType] = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000..6acac9a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,368 @@ +import logging +import ssl +from base64 import b64encode +from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] +HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, +) -> List[Tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +def build_auth_header(username: bytes, password: bytes) -> bytes: + userpass = username + b":" + password + return b"Basic " + b64encode(userpass) + + +class HTTPProxy(ConnectionPool): + """ + A connection pool that sends requests via an HTTP proxy. 
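+
+ A minimal usage sketch (the proxy address is illustrative; any
+ `proxy_url` as documented below works the same way):
+
+ proxy = HTTPProxy(proxy_url="http://127.0.0.1:8080/")
+ response = proxy.request("GET", "https://www.example.com/")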
+ """
+
+ def __init__(
+ self,
+ proxy_url: Union[URL, bytes, str],
+ proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
+ proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
+ ssl_context: Optional[ssl.SSLContext] = None,
+ proxy_ssl_context: Optional[ssl.SSLContext] = None,
+ max_connections: Optional[int] = 10,
+ max_keepalive_connections: Optional[int] = None,
+ keepalive_expiry: Optional[float] = None,
+ http1: bool = True,
+ http2: bool = False,
+ retries: int = 0,
+ local_address: Optional[str] = None,
+ uds: Optional[str] = None,
+ network_backend: Optional[NetworkBackend] = None,
+ socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ ) -> None:
+ """
+ A connection pool for making HTTP requests.
+
+ Parameters:
+ proxy_url: The URL to use when connecting to the proxy server.
+ For example `"http://127.0.0.1:8080/"`.
+ proxy_auth: Any proxy authentication as a two-tuple of
+ (username, password). May be either bytes or ascii-only str.
+ proxy_headers: Any HTTP headers to use for the proxy requests.
+ For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
+ ssl_context: An SSL context to use for verifying connections.
+ If not specified, the default `httpcore.default_ssl_context()`
+ will be used.
+ proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin.
+ max_connections: The maximum number of concurrent HTTP connections that
+ the pool should allow. Any attempt to send a request on a pool that
+ would exceed this amount will block until a connection is available.
+ max_keepalive_connections: The maximum number of idle HTTP connections
+ that will be maintained in the pool.
+ keepalive_expiry: The duration in seconds that an idle HTTP connection
+ may be maintained for before being expired from the pool.
+ http1: A boolean indicating if HTTP/1.1 requests should be supported
+ by the connection pool. Defaults to True.
+ http2: A boolean indicating if HTTP/2 requests should be supported by
+ the connection pool. Defaults to False.
+ retries: The maximum number of retries when trying to establish
+ a connection.
+ local_address: Local address to connect from. Can also be used to
+ connect using a particular address family. Using
+ `local_address="0.0.0.0"` will connect using an `AF_INET` address
+ (IPv4), while using `local_address="::"` will connect using an
+ `AF_INET6` address (IPv6).
+ uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+ network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + authorization = build_auth_header(username, password) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, + keepalive_expiry: Optional[float] = None, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: 
+ return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: Optional[ssl.SSLContext] = None, + proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, + keepalive_expiry: Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: Optional[NetworkBackend] = None, + socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + 
return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 0000000..5e95be1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,135 @@ +from contextlib import contextmanager +from typing import Iterator, Optional, Union + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextmanager + def stream( + self, + method: Union[bytes, str], + url: Union[URL, bytes, str], + *, + headers: HeaderTypes = None, + content: Union[bytes, Iterator[bytes], None] = None, + extensions: Optional[Extensions] = None, + ) -> Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
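+
+ (For HTTP/2 this covers, for example, a connection whose stream IDs
+ are exhausted, or which has recorded a connection-level error.)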
+
+ While the connection is being established we may not yet know if it is going
+ to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+ treated as being available, but might ultimately raise `NewConnectionRequired`
+ if multiple requests are attempted over a connection that ends up being
+ established as HTTP/1.1.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def has_expired(self) -> bool:
+ """
+ Return `True` if the connection is in a state where it should be closed.
+
+ This either means that the connection is idle and it has passed the
+ expiry time on its keep-alive, or that the server has sent an EOF.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def is_idle(self) -> bool:
+ """
+ Return `True` if the connection is currently idle.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def is_closed(self) -> bool:
+ """
+ Return `True` if the connection has been closed.
+
+ Used when a response is closed to determine if the connection may be
+ returned to the connection pool or not.
+ """
+ raise NotImplementedError() # pragma: nocover
diff --git a/myenv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/myenv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
new file mode 100644
index 0000000..502e4d7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
@@ -0,0 +1,342 @@
+import logging
+import ssl
+import typing
+
+from socksio import socks5
+
+from .._backends.sync import SyncBackend
+from .._backends.base import NetworkBackend, NetworkStream
+from .._exceptions import ConnectionNotAvailable, ProxyError
+from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
+from .._ssl import default_ssl_context
+from .._synchronization import Lock
+from .._trace import Trace
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .interfaces import ConnectionInterface
+
+logger = logging.getLogger("httpcore.socks")
+
+
+AUTH_METHODS = {
+ b"\x00": "NO AUTHENTICATION REQUIRED",
+ b"\x01": "GSSAPI",
+ b"\x02": "USERNAME/PASSWORD",
+ b"\xff": "NO ACCEPTABLE METHODS",
+}
+
+REPLY_CODES = {
+ b"\x00": "Succeeded",
+ b"\x01": "General SOCKS server failure",
+ b"\x02": "Connection not allowed by ruleset",
+ b"\x03": "Network unreachable",
+ b"\x04": "Host unreachable",
+ b"\x05": "Connection refused",
+ b"\x06": "TTL expired",
+ b"\x07": "Command not supported",
+ b"\x08": "Address type not supported",
+}
+
+
+def _init_socks5_connection(
+ stream: NetworkStream,
+ *,
+ host: bytes,
+ port: int,
+ auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+) -> None:
+ conn = socks5.SOCKS5Connection()
+
+ # Auth method request
+ auth_method = (
+ socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+ if auth is None
+ else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+ )
+ conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Auth method response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socks5.SOCKS5AuthReply)
+ if response.method != auth_method:
+ requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
+ responded = AUTH_METHODS.get(response.method, "UNKNOWN")
+ raise ProxyError(
+ f"Requested {requested} from proxy server, but got {responded}."
+ )
+
+ if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+ # Username/password request
+ assert auth is not None
+ username, password = auth
+ conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Username/password response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
+ if not response.success:
+ raise ProxyError("Invalid username/password")
+
+ # Connect request
+ conn.send(
+ socks5.SOCKS5CommandRequest.from_address(
+ socks5.SOCKS5Command.CONNECT, (host, port)
+ )
+ )
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Connect response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socks5.SOCKS5Reply)
+ if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
+ reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+ raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class SOCKSProxy(ConnectionPool):
+ """
+ A connection pool that sends requests via a SOCKS5 proxy.
+ """
+
+ def __init__(
+ self,
+ proxy_url: typing.Union[URL, bytes, str],
+ proxy_auth: typing.Optional[
+ typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
+ ] = None,
+ ssl_context: typing.Optional[ssl.SSLContext] = None,
+ max_connections: typing.Optional[int] = 10,
+ max_keepalive_connections: typing.Optional[int] = None,
+ keepalive_expiry: typing.Optional[float] = None,
+ http1: bool = True,
+ http2: bool = False,
+ retries: int = 0,
+ network_backend: typing.Optional[NetworkBackend] = None,
+ ) -> None:
+ """
+ A connection pool for making HTTP requests.
+
+ Parameters:
+ proxy_url: The URL to use when connecting to the proxy server.
+ For example `"http://127.0.0.1:8080/"`.
+ proxy_auth: Any proxy authentication as a two-tuple of
+ (username, password). May be either bytes or ascii-only str.
+ ssl_context: An SSL context to use for verifying connections.
+ If not specified, the default `httpcore.default_ssl_context()`
+ will be used.
+ max_connections: The maximum number of concurrent HTTP connections that
+ the pool should allow. Any attempt to send a request on a pool that
+ would exceed this amount will block until a connection is available.
+ max_keepalive_connections: The maximum number of idle HTTP connections
+ that will be maintained in the pool.
+ keepalive_expiry: The duration in seconds that an idle HTTP connection
+ may be maintained for before being expired from the pool.
+ http1: A boolean indicating if HTTP/1.1 requests should be supported
+ by the connection pool. Defaults to True.
+ http2: A boolean indicating if HTTP/2 requests should be supported by
+ the connection pool. Defaults to False.
+ retries: The maximum number of retries when trying to establish
+ a connection.
+ network_backend: A backend instance to use for handling network I/O.
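+
+ A minimal usage sketch (assumes a SOCKS5 proxy listening locally on
+ port 1080; the address is illustrative):
+
+ proxy = SOCKSProxy(proxy_url="socks5://127.0.0.1:1080/")
+ response = proxy.request("GET", "https://www.example.com/")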
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + ssl_context: typing.Optional[ssl.SSLContext] = None, + keepalive_expiry: typing.Optional[float] = None, + http1: bool = True, + http2: bool = False, + network_backend: typing.Optional[NetworkBackend] = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: typing.Optional[ConnectionInterface] = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + 
http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/lib/python3.12/site-packages/httpcore/_synchronization.py b/myenv/lib/python3.12/site-packages/httpcore/_synchronization.py new file mode 100644 index 0000000..9619a39 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_synchronization.py @@ -0,0 +1,317 @@ +import threading +from types import TracebackType +from typing import Optional, Type + +from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions + +# Our async synchronization primatives use either 'anyio' or 'trio' depending +# on if they're running under asyncio or trio. + +try: + import trio +except ImportError: # pragma: nocover + trio = None # type: ignore + +try: + import anyio +except ImportError: # pragma: nocover + anyio = None # type: ignore + + +def current_async_library() -> str: + # Determine if we're running under trio or asyncio. + # See https://sniffio.readthedocs.io/en/latest/ + try: + import sniffio + except ImportError: # pragma: nocover + environment = "asyncio" + else: + environment = sniffio.current_async_library() + + if environment not in ("asyncio", "trio"): # pragma: nocover + raise RuntimeError("Running under an unsupported async environment.") + + if environment == "asyncio" and anyio is None: # pragma: nocover + raise RuntimeError( + "Running with asyncio requires installation of 'httpcore[asyncio]'." 
+ ) + + if environment == "trio" and trio is None: # pragma: nocover + raise RuntimeError( + "Running with trio requires installation of 'httpcore[trio]'." + ) + + return environment + + +class AsyncLock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_lock = trio.Lock() + elif self._backend == "asyncio": + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> "AsyncLock": + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + elif self._backend == "asyncio": + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + elif self._backend == "asyncio": + self._anyio_lock.release() + + +class AsyncThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. + """ + + def __enter__(self) -> "AsyncThreadLock": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_event = trio.Event() + elif self._backend == "asyncio": + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + elif self._backend == "asyncio": + self._anyio_event.set() + + async def wait(self, timeout: Optional[float] = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + elif self._backend == "asyncio": + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + elif self._backend == "asyncio": + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + elif self._backend == "asyncio": + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + elif self._backend == "asyncio": + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = current_async_library() + + if self._backend == "trio": + self._trio_shield = trio.CancelScope(shield=True) + elif self._backend == "asyncio": + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> "AsyncShieldCancellation": + if self._backend == "trio": + self._trio_shield.__enter__() + elif self._backend == "asyncio": + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + elif self._backend == "asyncio": + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "Lock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class ThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. 
+ """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> "ThreadLock": + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: Optional[float] = None) -> None: + if timeout == float("inf"): # pragma: no cover + timeout = None + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> "ShieldCancellation": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/httpcore/_trace.py b/myenv/lib/python3.12/site-packages/httpcore/_trace.py new file mode 100644 index 0000000..b122a53 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_trace.py @@ -0,0 +1,105 @@ +import inspect +import logging +from types import TracebackType +from typing import Any, Dict, Optional, Type + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Optional[Request] = None, + kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." 
+ ) + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + def __enter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + self.trace(f"{self.name}.started", info) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + self.trace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + self.trace(f"{self.name}.failed", info) + + async def atrace(self, name: str, info: Dict[str, Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + coro = self.trace_extension(prefix_and_name, info) + if not inspect.iscoroutine(coro): # pragma: no cover + raise TypeError( + "If you're using an asynchronous interface, " + "the callback of the `trace` extension should " + "be an asynchronous function rather than a normal function." + ) + await coro + + if self.debug: + if not info or "return_value" in info and info["return_value"] is None: + message = name + else: + args = " ".join([f"{key}={value!r}" for key, value in info.items()]) + message = f"{name} {args}" + self.logger.debug(message) + + async def __aenter__(self) -> "Trace": + if self.should_trace: + info = self.kwargs + await self.atrace(f"{self.name}.started", info) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + if self.should_trace: + if exc_value is None: + info = {"return_value": self.return_value} + await self.atrace(f"{self.name}.complete", info) + else: + info = {"exception": exc_value} + await self.atrace(f"{self.name}.failed", info) diff --git a/myenv/lib/python3.12/site-packages/httpcore/_utils.py b/myenv/lib/python3.12/site-packages/httpcore/_utils.py new file mode 100644 index 0000000..df5dea8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpcore/_utils.py @@ -0,0 +1,36 @@ +import select +import socket +import sys +import typing + + +def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. + + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if it the next read operation + # on it is ready to return the terminating `b""`. + sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: # pragma: nocover + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows, and when poll is unavailable and select.poll + # everywhere else. (E.g. When eventlet is in use. 
See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/myenv/lib/python3.12/site-packages/httpcore/py.typed b/myenv/lib/python3.12/site-packages/httpcore/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/METADATA b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/METADATA new file mode 100644 index 0000000..4c6a080 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/METADATA @@ -0,0 +1,207 @@ +Metadata-Version: 2.3 +Name: httpx +Version: 0.27.2 +Summary: The next generation HTTP client. +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Homepage, https://github.com/encode/httpx +Project-URL: Source, https://github.com/encode/httpx +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: anyio +Requires-Dist: certifi +Requires-Dist: httpcore==1.* +Requires-Dist: idna +Requires-Dist: sniffio +Provides-Extra: brotli +Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: cli +Requires-Dist: click==8.*; extra == 'cli' +Requires-Dist: pygments==2.*; extra == 'cli' +Requires-Dist: rich<14,>=10; extra == 'cli' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: zstd +Requires-Dist: zstandard>=0.18.0; extra == 'zstd' +Description-Content-Type: text/markdown + +

+# HTTPX
+
+HTTPX - A next-generation HTTP client for Python.
+
+[Badges: Test Suite | Package version]
+ +HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated +command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync +and async APIs**. + +--- + +Install HTTPX using pip: + +```shell +pip install httpx +``` + +Now, let's get started: + +```pycon +>>> import httpx +>>> r = httpx.get('https://www.example.org/') +>>> r + +>>> r.status_code +200 +>>> r.headers['content-type'] +'text/html; charset=UTF-8' +>>> r.text +'\n\n\nExample Domain...' +``` + +Or, using the command-line client. + +```shell +pip install 'httpx[cli]' # The command line client is an optional dependency. +``` + +Which now allows us to use HTTPX directly from the command-line... + +

+    httpx --help
+
+Sending a request...
+
+    httpx http://httpbin.org/json
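+
+HTTPX also provides an async API that mirrors the sync one. A minimal
+sketch (the printed status code is illustrative):
+
+```pycon
+>>> import asyncio
+>>> import httpx
+>>> async def main():
+...     async with httpx.AsyncClient() as client:
+...         r = await client.get('https://www.example.org/')
+...         print(r.status_code)
+...
+>>> asyncio.run(main())
+200
+```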

+ +## Features + +HTTPX builds on the well-established usability of `requests`, and gives you: + +* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). +* An integrated command-line client. +* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). +* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). +* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport). +* Strict timeouts everywhere. +* Fully type annotated. +* 100% test coverage. + +Plus all the standard features of `requests`... + +* International Domains and URLs +* Keep-Alive & Connection Pooling +* Sessions with Cookie Persistence +* Browser-style SSL Verification +* Basic/Digest Authentication +* Elegant Key/Value Cookies +* Automatic Decompression +* Automatic Content Decoding +* Unicode Response Bodies +* Multipart File Uploads +* HTTP(S) Proxy Support +* Connection Timeouts +* Streaming Downloads +* .netrc Support +* Chunked Requests + +## Installation + +Install with pip: + +```shell +pip install httpx +``` + +Or, to include the optional HTTP/2 support, use: + +```shell +pip install httpx[http2] +``` + +HTTPX requires Python 3.8+. + +## Documentation + +Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). + +For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). + +For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. + +The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. + +To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). + +## Contribute + +If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. + +## Dependencies + +The HTTPX project relies on these excellent libraries: + +* `httpcore` - The underlying transport implementation for `httpx`. + * `h11` - HTTP/1.1 support. +* `certifi` - SSL certificates. +* `idna` - Internationalized domain name support. +* `sniffio` - Async library autodetection. + +As well as these optional installs: + +* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* +* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* +* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* +* `click` - Command line client support. *(Optional, with `httpx[cli]`)* +* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* +* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)* + +A huge amount of credit is due to `requests` for the API layout that +much of this work follows, as well as to `urllib3` for plenty of design +inspiration around the lower-level networking details. + +--- + +

HTTPX is BSD licensed code.
Designed & crafted with care.

— 🦋 —

+ +## Release Information + +### Fixed + +* Reintroduced supposedly-private `URLTypes` shortcut. (#2673) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/RECORD b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/RECORD new file mode 100644 index 0000000..fad8f8f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/RECORD @@ -0,0 +1,56 @@ +../../../bin/httpx,sha256=IMu2zYMGOYVO5KZwyo2Epokm2Q1ATmK9NPKtKQST77A,225 +httpx-0.27.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.27.2.dist-info/METADATA,sha256=vt_qxvRfNlYTOeaZ0pHUdimRoq3pHYQCfHeR2ZIsMQE,7103 +httpx-0.27.2.dist-info/RECORD,, +httpx-0.27.2.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +httpx-0.27.2.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.27.2.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171 +httpx/__pycache__/__init__.cpython-312.pyc,, +httpx/__pycache__/__version__.cpython-312.pyc,, +httpx/__pycache__/_api.cpython-312.pyc,, +httpx/__pycache__/_auth.cpython-312.pyc,, +httpx/__pycache__/_client.cpython-312.pyc,, +httpx/__pycache__/_compat.cpython-312.pyc,, +httpx/__pycache__/_config.cpython-312.pyc,, +httpx/__pycache__/_content.cpython-312.pyc,, +httpx/__pycache__/_decoders.cpython-312.pyc,, +httpx/__pycache__/_exceptions.cpython-312.pyc,, +httpx/__pycache__/_main.cpython-312.pyc,, +httpx/__pycache__/_models.cpython-312.pyc,, +httpx/__pycache__/_multipart.cpython-312.pyc,, +httpx/__pycache__/_status_codes.cpython-312.pyc,, +httpx/__pycache__/_types.cpython-312.pyc,, +httpx/__pycache__/_urlparse.cpython-312.pyc,, +httpx/__pycache__/_urls.cpython-312.pyc,, +httpx/__pycache__/_utils.cpython-312.pyc,, +httpx/__version__.py,sha256=jmvSKcTIJ9sXDdDB3XqJ_YgYJFCqEofEHgm5SeeZ3hk,108 +httpx/_api.py,sha256=t4s7Uc7FoQLyxLEWSkjYWYl_GkDCbMU_WRyOqbZz11E,13078 +httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891 +httpx/_client.py,sha256=xKv340hcLfOPCnilexEh2gNg5BrEQqIS7_O0ZB9nxWw,68032 +httpx/_compat.py,sha256=r30ViaoqRerTF84IvzjA7k1L4144UWDLEiKFYW6F6U0,2258 +httpx/_config.py,sha256=BJZDSp3nRg7XHPhvP2vz7K_A9n4tefqQS4F-UmFLyXA,12259 +httpx/_content.py,sha256=0UsVVH6JwcQLt7STbZzS0GPh_13QSxKTOv39QaE7zck,8073 +httpx/_decoders.py,sha256=meUGWtnOoODJRAAGHTs9VdMhe3NplJg4D2qAJLj8d9c,11444 +httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527 +httpx/_main.py,sha256=LcRXtGghiTux7yj0pGXQXx7PNfr3EHE3VcxBcCY4RcE,15635 +httpx/_models.py,sha256=yjVVRmy0VXhJngWbxZ8Pnn2Jpwr-22Zy_JPakfyRqWU,42372 +httpx/_multipart.py,sha256=CkS8cH5Nau1YrvizDSCRhdK13fltMV2-GnvM3msGRzw,8885 +httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639 +httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275 +httpx/_transports/__pycache__/__init__.cpython-312.pyc,, +httpx/_transports/__pycache__/asgi.cpython-312.pyc,, +httpx/_transports/__pycache__/base.cpython-312.pyc,, +httpx/_transports/__pycache__/default.cpython-312.pyc,, +httpx/_transports/__pycache__/mock.cpython-312.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, +httpx/_transports/asgi.py,sha256=R51xA7vsZvPb4f2g8lGvYCo2vhkcBl0LK_SbwzxQ0_k,5234 +httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523 
+httpx/_transports/default.py,sha256=Mxq8Z7uTUOSYx5gXSax8gexeOYF000RAcqpllbUMdAY,13395 +httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232 +httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825 +httpx/_types.py,sha256=9GNQPQu8uJwXJdthlkMqUhDDSDpUDzLuF1lwaDZhFK0,3451 +httpx/_urlparse.py,sha256=aiYG6lL323vw5iC_OoOUtUNXc1G6wRtn9eBc_G2OcHc,17918 +httpx/_urls.py,sha256=JgYkK4IuC3PviLY-Dnyam_n-MxjT5g_dJibUSWtt_Kc,21808 +httpx/_utils.py,sha256=lByQlK36pmXTJa7WxlWEfB48tcKb9fxI8xEO4ayi1JM,13858 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/WHEEL new file mode 100644 index 0000000..cdd68a4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/entry_points.txt b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/entry_points.txt new file mode 100644 index 0000000..8ae9600 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..ab79d16 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/myenv/lib/python3.12/site-packages/httpx/__init__.py b/myenv/lib/python3.12/site-packages/httpx/__init__.py new file mode 100644 index 0000000..e9addde --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/__init__.py @@ -0,0 +1,105 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import * +from ._auth import * +from ._client import * +from ._config import * +from ._content import * +from ._exceptions import * +from ._models import * +from ._status_codes import * +from ._transports import * +from ._types import * +from ._urls import * + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a39927c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 0000000..ef7cb43 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000..dac7698 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc new file mode 100644 index 0000000..bbcacd8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc differ diff 
--git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc new file mode 100644 index 0000000..ddc29b4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..f3c953a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000..8282ae5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc new file mode 100644 index 0000000..03c9f9f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc new file mode 100644 index 0000000..7b8b1f3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000..c9dbf33 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc new file mode 100644 index 0000000..0fc069e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000..643507d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc new file mode 100644 index 0000000..f83e3ea Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc new file mode 100644 index 0000000..2c03e00 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc new file mode 100644 index 0000000..347840f Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc new file mode 100644 index 0000000..d070f5f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc new file mode 100644 index 0000000..c1fdcaf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000..ec36916 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/__version__.py b/myenv/lib/python3.12/site-packages/httpx/__version__.py new file mode 100644 index 0000000..5eaaddb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." +__version__ = "0.27.2" diff --git a/myenv/lib/python3.12/site-packages/httpx/_api.py b/myenv/lib/python3.12/site-packages/httpx/_api.py new file mode 100644 index 0000000..4e98b60 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_api.py @@ -0,0 +1,479 @@ +from __future__ import annotations + +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + VerifyTypes, +) +from ._urls import URL + +__all__ = [ + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "request", + "stream", +] + + +def request( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. 
+ * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. 
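+
+    Usage (a minimal sketch; the URL is illustrative):
+
+    ```
+    >>> import httpx
+    >>> with httpx.stream('GET', 'https://www.example.org/') as response:
+    ...     for chunk in response.iter_bytes():
+    ...         print(len(chunk))
+    ```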
+ + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. + """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. 
+ """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + follow_redirects: bool = False, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/myenv/lib/python3.12/site-packages/httpx/_auth.py b/myenv/lib/python3.12/site-packages/httpx/_auth.py new file mode 100644 index 0000000..b03971a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_auth.py @@ -0,0 +1,348 @@ +from __future__ import annotations + +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Cookies, Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"] + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. 
+ """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: str | None = None) -> None: + # Lazily import 'netrc'. + # There's no need for us to load this module unless 'NetRCAuth' is being used. + import netrc + + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file. 
+ request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: _DigestAuthChallenge | None = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + if response.cookies: + Cookies(response.cookies).set_cookie_header(request=request) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> _DigestAuthChallenge: + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
+ assert scheme.lower() == "digest" + + header_dict: dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: _DigestAuthChallenge + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + # Following RFC 2069 + digest_data = [HA1, challenge.nonce, HA2] + else: + # Following RFC 2617/7616 + digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join(digest_data)), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: bytes | None + qop: bytes | None diff --git a/myenv/lib/python3.12/site-packages/httpx/_client.py b/myenv/lib/python3.12/site-packages/httpx/_client.py new file mode 100644 index 
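For intuition, the MD5/`qop=auth` case of the computation above can be reproduced by hand. All challenge values here are made up; a real `realm`/`nonce` comes from the server's `WWW-Authenticate` header:

```python
import hashlib

def md5_hex(data: bytes) -> bytes:
    return hashlib.md5(data).hexdigest().encode()

username, realm, password = b"user", b"realm@host.com", b"secret"
method, uri = b"GET", b"/dir/index.html"
nonce, cnonce, nc, qop = b"abc", b"0a4f113b", b"00000001", b"auth"

ha1 = md5_hex(b":".join([username, realm, password]))   # A1 = user:realm:pass
ha2 = md5_hex(b":".join([method, uri]))                 # A2 = method:uri
# RFC 2617 response for qop=auth: H(HA1:nonce:nc:cnonce:qop:HA2)
print(md5_hex(b":".join([ha1, nonce, nc, cnonce, qop, ha2])).decode())
```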
0000000..26610f6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_client.py @@ -0,0 +1,2065 @@ +from __future__ import annotations + +import datetime +import enum +import logging +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, + VerifyTypes, +) +from ._urls import URL, QueryParams +from ._utils import ( + Timer, + URLPattern, + get_environment_proxies, + is_https_redirect, + same_origin, +) + +__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"] + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. 
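The sentinel matters in practice because `None` is itself meaningful. A small demonstration of the three distinct timeout spellings, assuming a reachable `example.org`:

```python
import httpx

client = httpx.Client(timeout=5.0)

r1 = client.get("https://example.org")                # omitted -> client's 5s default
r2 = client.get("https://example.org", timeout=None)  # None -> disable timeouts entirely
r3 = client.get("https://example.org", timeout=1.0)   # explicit -> 1s for this request
```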
+ """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: ProxiesTypes | None, allow_env_proxies: bool + ) -> dict[str, Proxy | None]: + if proxies is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> dict[str, list[EventHook]]: + return self._event_hooks + + 
@event_hooks.setter + def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> Auth | None: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URL | str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. 
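`build_request()` is also useful on its own when a request needs inspection or mutation before dispatch; the `X-Api-Key` header below is hypothetical:

```python
import httpx

client = httpx.Client(
    base_url="https://api.example.org",
    headers={"X-Api-Key": "..."},
    params={"format": "json"},
)

# Client-level params/headers/cookies are merged in at build time.
request = client.build_request("GET", "/items", params={"page": "2"})
print(request.url)  # e.g. https://api.example.org/items?format=json&page=2

response = client.send(request)
```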
+ # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes | None = None + ) -> QueryParamTypes | None: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. + """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: AuthTypes | None) -> Auth | None: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. 
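The doctest in the comment above generalizes; a few more data points on the trailing-slash normalization, runnable as-is:

```python
import httpx

client = httpx.Client(base_url="https://www.example.com/subpath")

print(client.base_url)                           # https://www.example.com/subpath/
print(client.build_request("GET", "/path").url)  # https://www.example.com/subpath/path
print(client.build_request("GET", "path").url)   # https://www.example.com/subpath/path

# Absolute URLs bypass base_url merging entirely.
print(client.build_request("GET", "https://other.example/x").url)
```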
+ if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + if not is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> SyncByteStream | AsyncByteStream | None: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + def _set_timeout(self, request: Request) -> None: + if "timeout" not in request.extensions: + timeout = ( + self.timeout + if isinstance(self.timeout, UseClientDefault) + else Timeout(self.timeout) + ) + request.extensions = dict(**request.extensions, timeout=timeout.as_dict()) + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). 
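With `follow_redirects=False` (the default) the machinery above still runs, but the prepared follow-up is parked on `response.next_request` instead of being sent, which makes redirects easy to inspect:

```python
import httpx

with httpx.Client() as client:
    response = client.get("http://github.com/")   # served as a redirect
    print(response.status_code, response.headers["location"])

    # The rewritten method/URL/headers from _build_redirect_request():
    nxt = response.next_request
    if nxt is not None:
        followed = client.send(nxt)
        print(followed.url)
```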
+ * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." + ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=WSGITransport(app=...)' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: dict[URLPattern, BaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/clients/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
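The `mounts` mapping built above composes with explicit transports. A plausible configuration routing plain HTTP through a local forward proxy (the proxy URL and retry count here are made up):

```python
import httpx

client = httpx.Client(
    mounts={
        # All http:// traffic goes via a local forward proxy...
        "http://": httpx.HTTPTransport(proxy="http://localhost:8030"),
        # ...one host gets a transport with connection retries...
        "all://api.example.org": httpx.HTTPTransport(retries=3),
        # ...and anything unmatched falls back to the default transport.
    },
)
```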
+ ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
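A streaming sketch; `httpbin.org/stream-bytes` stands in for any large download. The context manager guarantees `response.close()` runs even if iteration is abandoned:

```python
import httpx

with httpx.Client() as client:
    with client.stream("GET", "https://httpbin.org/stream-bytes/4096") as response:
        total = 0
        for chunk in response.iter_bytes():  # body is never held whole in memory
            total += len(chunk)
    print(response.status_code, total)
```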
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + self._set_timeout(request) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. 
+ + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + It can be shared between tasks. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. 
+ * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + proxies: ProxiesTypes | None = None, + mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + if app: + message = ( + "The 'app' shortcut is now deprecated." + " Use the explicit style 'transport=ASGITransport(app=...)' instead." + ) + warnings.warn(message, DeprecationWarning) + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport | None = None, + app: typing.Callable[..., typing.Any] | None = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. 
+ + [0]: /advanced/clients/#merging-of-configuration + """ + + if cookies is not None: # pragma: no cover + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." + ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
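The async variant mirrors the sync API with `a`-prefixed I/O methods. A self-contained sketch, again using `httpbin.org` as a stand-in:

```python
import asyncio
import httpx

async def main() -> None:
    async with httpx.AsyncClient() as client:
        async with client.stream(
            "GET", "https://httpbin.org/stream-bytes/4096"
        ) as response:
            total = 0
            async for chunk in response.aiter_bytes():
                total += len(chunk)
        print(response.status_code, total)

asyncio.run(main())
```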
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + self._set_timeout(request) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + while True: + response = await self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except BaseException as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + await hook(response) + + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + await timer.async_start() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." 
+ ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." 
+ ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/myenv/lib/python3.12/site-packages/httpx/_compat.py b/myenv/lib/python3.12/site-packages/httpx/_compat.py new file mode 100644 index 0000000..7d86dce --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_compat.py @@ -0,0 +1,63 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" + +import re +import ssl +import sys +from types import ModuleType +from typing import Optional + +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. +try: + import brotlicffi as brotli +except ImportError: # pragma: no cover + try: + import brotli + except ImportError: + brotli = None + +# Zstandard support is optional +zstd: Optional[ModuleType] = None +try: + import zstandard as zstd +except (AttributeError, ImportError, ValueError): # Defensive: + zstd = None +else: + # The package 'zstandard' added the 'eof' property starting + # in v0.18.0 which we require to ensure a complete and + # valid zstd stream was fed into the ZstdDecoder. + # See: https://github.com/urllib3/urllib3/pull/2624 + _zstd_version = tuple( + map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] + ) + if _zstd_version < (0, 18): # Defensive: + zstd = None + + +if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7): + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. + # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version + context.minimum_version = ssl.TLSVersion.TLSv1_2 + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. 
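+        # Illustrative usage (editorial sketch; the variable name `ctx` is
+        # hypothetical):
+        #
+        #     ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+        #     set_minimum_tls_version_1_2(ctx)
+        #     # ...SSLv2/v3 and TLS 1.0/1.1 are now disabled on either branch.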
+ context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 + + +__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/myenv/lib/python3.12/site-packages/httpx/_config.py b/myenv/lib/python3.12/site-packages/httpx/_config.py new file mode 100644 index 0000000..1b12911 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_config.py @@ -0,0 +1,372 @@ +from __future__ import annotations + +import logging +import os +import ssl +import typing +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, VerifyTypes +from ._urls import URL +from ._utils import get_ca_bundle_from_env + +__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"] + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = logging.getLogger("httpx") + + +class UnsetType: + pass # pragma: no cover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: CertTypes | None = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. + """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: CertTypes | None = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ) -> None: + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.debug( + "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", + self.verify, + self.cert, + self.trust_env, + self.http2, + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. + """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. + context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. 
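+        # Editorial sketch of the `verify` forms handled above (values are
+        # illustrative, not prescriptive):
+        #
+        #     SSLConfig(verify=True)                          # bundled certifi CAs
+        #     SSLConfig(verify="/path/to/ca-bundle.pem")      # custom CA file
+        #     SSLConfig(verify=ssl.create_default_context())  # pre-built context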
+ try: + context.post_handshake_auth = True + except AttributeError: # pragma: no cover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False + except AttributeError: # pragma: no cover + pass + + if ca_bundle_path.is_file(): + cafile = str(ca_bundle_path) + logger.debug("load_verify_locations cafile=%r", cafile) + context.load_verify_locations(cafile=cafile) + elif ca_bundle_path.is_dir(): + capath = str(ca_bundle_path) + logger.debug("load_verify_locations capath=%r", capath) + context.load_verify_locations(capath=capath) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. + """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: TimeoutTypes | UnsetType = UNSET, + *, + connect: None | float | UnsetType = UNSET, + read: None | float | UnsetType = UNSET, + write: None | float | UnsetType = UNSET, + pool: None | float | UnsetType = UNSET, + ) -> None: + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. + assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. 
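+            # e.g. (illustrative) Timeout((5.0, 30.0)) gives connect=5.0 and
+            # read=30.0, with write and pool defaulting to None below.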
+ self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> dict[str, float | None]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. + """ + + def __init__( + self, + *, + max_connections: int | None = None, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = 5.0, + ) -> None: + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URL | str, + *, + ssl_context: ssl.SSLContext | None = None, + auth: tuple[str, str] | None = None, + headers: HeaderTypes | None = None, + ) -> None: + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. 
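+            # e.g. (illustrative) Proxy("http://user:pass@localhost:8030")
+            # yields auth=("user", "pass") and url "http://localhost:8030".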
+ auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + self.ssl_context = ssl_context + + @property + def raw_auth(self) -> tuple[bytes, bytes] | None: + # The proxy authentication as raw bytes. + return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. + url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/myenv/lib/python3.12/site-packages/httpx/_content.py b/myenv/lib/python3.12/site-packages/httpx/_content.py new file mode 100644 index 0000000..786699f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_content.py @@ -0,0 +1,238 @@ +from __future__ import annotations + +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + Mapping, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + +__all__ = ["ByteStream"] + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. 
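+            # Editorial note: async generators are single-use; re-iterating a
+            # consumed generator stream raises StreamConsumed (checked above).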
+ async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. + """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> tuple[dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: bytes | None +) -> tuple[dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent | None = None, + data: RequestData | None = None, + files: 
RequestFiles | None = None,
+    json: Any | None = None,
+    boundary: bytes | None = None,
+) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
+    """
+    Handles encoding the given `content`, `data`, `files`, and `json`,
+    returning a two-tuple of (<headers>, <stream>).
+    """
+    if data is not None and not isinstance(data, Mapping):
+        # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
+        # for raw request content, and `data=<form dict>` for url encoded or
+        # multipart form content.
+        #
+        # However for compat with requests, we *do* still support
+        # `data=<...>` usages. We deal with that case here, treating it
+        # as if `content=<...>` had been supplied instead.
+        message = "Use 'content=<...>' to upload raw bytes/text content."
+        warnings.warn(message, DeprecationWarning)
+        return encode_content(data)
+
+    if content is not None:
+        return encode_content(content)
+    elif files:
+        return encode_multipart_data(data or {}, files, boundary)
+    elif data:
+        return encode_urlencoded_data(data)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
+
+
+def encode_response(
+    content: ResponseContent | None = None,
+    text: str | None = None,
+    html: str | None = None,
+    json: Any | None = None,
+) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
+    """
+    Handles encoding the given `content`, returning a two-tuple of
+    (<headers>, <stream>).
+    """
+    if content is not None:
+        return encode_content(content)
+    elif text is not None:
+        return encode_text(text)
+    elif html is not None:
+        return encode_html(html)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
diff --git a/myenv/lib/python3.12/site-packages/httpx/_decoders.py b/myenv/lib/python3.12/site-packages/httpx/_decoders.py
new file mode 100644
index 0000000..62f2c0b
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpx/_decoders.py
@@ -0,0 +1,371 @@
+"""
+Handlers for Content-Encoding.
+
+See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+"""
+
+from __future__ import annotations
+
+import codecs
+import io
+import typing
+import zlib
+
+from ._compat import brotli, zstd
+from ._exceptions import DecodingError
+
+
+class ContentDecoder:
+    def decode(self, data: bytes) -> bytes:
+        raise NotImplementedError()  # pragma: no cover
+
+    def flush(self) -> bytes:
+        raise NotImplementedError()  # pragma: no cover
+
+
+class IdentityDecoder(ContentDecoder):
+    """
+    Handle unencoded data.
+    """
+
+    def decode(self, data: bytes) -> bytes:
+        return data
+
+    def flush(self) -> bytes:
+        return b""
+
+
+class DeflateDecoder(ContentDecoder):
+    """
+    Handle 'deflate' decoding.
+
+    See: https://stackoverflow.com/questions/1838699
+    """
+
+    def __init__(self) -> None:
+        self.first_attempt = True
+        self.decompressor = zlib.decompressobj()
+
+    def decode(self, data: bytes) -> bytes:
+        was_first_attempt = self.first_attempt
+        self.first_attempt = False
+        try:
+            return self.decompressor.decompress(data)
+        except zlib.error as exc:
+            if was_first_attempt:
+                self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
+                return self.decode(data)
+            raise DecodingError(str(exc)) from exc
+
+    def flush(self) -> bytes:
+        try:
+            return self.decompressor.flush()
+        except zlib.error as exc:  # pragma: no cover
+            raise DecodingError(str(exc)) from exc
+
+
+class GZipDecoder(ContentDecoder):
+    """
+    Handle 'gzip' decoding.
+
+    See: https://stackoverflow.com/questions/1838699
+    """
+
+    def __init__(self) -> None:
+        self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)
+
+    def decode(self, data: bytes) -> bytes:
+        try:
+            return self.decompressor.decompress(data)
+        except zlib.error as exc:
+            raise DecodingError(str(exc)) from exc
+
+    def flush(self) -> bytes:
+        try:
+            return self.decompressor.flush()
+        except zlib.error as exc:  # pragma: no cover
+            raise DecodingError(str(exc)) from exc
+
+
+class BrotliDecoder(ContentDecoder):
+    """
+    Handle 'brotli' decoding.
+
+    Requires `pip install brotlipy`. 
See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class ZStandardDecoder(ContentDecoder): + """ + Handle 'zstd' RFC 8878 decoding. + + Requires `pip install zstandard`. + Can be installed as a dependency of httpx using `pip install httpx[zstd]`. + """ + + # inspired by the ZstdDecoder implementation in urllib3 + def __init__(self) -> None: + if zstd is None: # pragma: no cover + raise ImportError( + "Using 'ZStandardDecoder', ..." + "Make sure to install httpx using `pip install httpx[zstd]`." + ) from None + + self.decompressor = zstd.ZstdDecompressor().decompressobj() + + def decode(self, data: bytes) -> bytes: + assert zstd is not None + output = io.BytesIO() + try: + output.write(self.decompressor.decompress(data)) + while self.decompressor.eof and self.decompressor.unused_data: + unused_data = self.decompressor.unused_data + self.decompressor = zstd.ZstdDecompressor().decompressobj() + output.write(self.decompressor.decompress(unused_data)) + except zstd.ZstdError as exc: + raise DecodingError(str(exc)) from exc + return output.getvalue() + + def flush(self) -> bytes: + ret = self.decompressor.flush() # note: this is a no-op + if not self.decompressor.eof: + raise DecodingError("Zstandard data is incomplete") # pragma: no cover + return bytes(ret) + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. 
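+        # e.g. (illustrative) "Content-Encoding: gzip, br" arrives here as
+        # [GZipDecoder(), BrotliDecoder()], so decoding applies brotli first
+        # and gzip second.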
+        self.children = list(reversed(children))
+
+    def decode(self, data: bytes) -> bytes:
+        for child in self.children:
+            data = child.decode(data)
+        return data
+
+    def flush(self) -> bytes:
+        data = b""
+        for child in self.children:
+            data = child.decode(data) + child.flush()
+        return data
+
+
+class ByteChunker:
+    """
+    Handles returning byte content in fixed-size chunks.
+    """
+
+    def __init__(self, chunk_size: int | None = None) -> None:
+        self._buffer = io.BytesIO()
+        self._chunk_size = chunk_size
+
+    def decode(self, content: bytes) -> list[bytes]:
+        if self._chunk_size is None:
+            return [content] if content else []
+
+        self._buffer.write(content)
+        if self._buffer.tell() >= self._chunk_size:
+            value = self._buffer.getvalue()
+            chunks = [
+                value[i : i + self._chunk_size]
+                for i in range(0, len(value), self._chunk_size)
+            ]
+            if len(chunks[-1]) == self._chunk_size:
+                self._buffer.seek(0)
+                self._buffer.truncate()
+                return chunks
+            else:
+                self._buffer.seek(0)
+                self._buffer.write(chunks[-1])
+                self._buffer.truncate()
+                return chunks[:-1]
+        else:
+            return []
+
+    def flush(self) -> list[bytes]:
+        value = self._buffer.getvalue()
+        self._buffer.seek(0)
+        self._buffer.truncate()
+        return [value] if value else []
+
+
+class TextChunker:
+    """
+    Handles returning text content in fixed-size chunks.
+    """
+
+    def __init__(self, chunk_size: int | None = None) -> None:
+        self._buffer = io.StringIO()
+        self._chunk_size = chunk_size
+
+    def decode(self, content: str) -> list[str]:
+        if self._chunk_size is None:
+            return [content] if content else []
+
+        self._buffer.write(content)
+        if self._buffer.tell() >= self._chunk_size:
+            value = self._buffer.getvalue()
+            chunks = [
+                value[i : i + self._chunk_size]
+                for i in range(0, len(value), self._chunk_size)
+            ]
+            if len(chunks[-1]) == self._chunk_size:
+                self._buffer.seek(0)
+                self._buffer.truncate()
+                return chunks
+            else:
+                self._buffer.seek(0)
+                self._buffer.write(chunks[-1])
+                self._buffer.truncate()
+                return chunks[:-1]
+        else:
+            return []
+
+    def flush(self) -> list[str]:
+        value = self._buffer.getvalue()
+        self._buffer.seek(0)
+        self._buffer.truncate()
+        return [value] if value else []
+
+
+class TextDecoder:
+    """
+    Handles incrementally decoding bytes into text.
+    """
+
+    def __init__(self, encoding: str = "utf-8") -> None:
+        self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
+
+    def decode(self, data: bytes) -> str:
+        return self.decoder.decode(data)
+
+    def flush(self) -> str:
+        return self.decoder.decode(b"", True)
+
+
+class LineDecoder:
+    """
+    Handles incrementally reading lines from text.
+
+    Has the same behaviour as the stdlib splitlines,
+    but handling the input iteratively.
+    """
+
+    def __init__(self) -> None:
+        self.buffer: list[str] = []
+        self.trailing_cr: bool = False
+
+    def decode(self, text: str) -> list[str]:
+        # See https://docs.python.org/3/library/stdtypes.html#str.splitlines
+        NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"
+
+        # We always push a trailing `\r` into the next decode iteration.
+        if self.trailing_cr:
+            text = "\r" + text
+            self.trailing_cr = False
+        if text.endswith("\r"):
+            self.trailing_cr = True
+            text = text[:-1]
+
+        if not text:
+            # NOTE: the edge case input of empty text doesn't occur in practice,
+            # because other httpx internals filter out this value
+            return []  # pragma: no cover
+
+        trailing_newline = text[-1] in NEWLINE_CHARS
+        lines = text.splitlines()
+
+        if len(lines) == 1 and not trailing_newline:
+            # No new lines, buffer the input and continue.
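+            # e.g. (illustrative) decode("hello") buffers and returns [];
+            # a following decode(" world\n") then returns ["hello world"].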
+ self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. + self.buffer = [lines.pop()] + + return lines + + def flush(self) -> list[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, + "zstd": ZStandardDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover +if zstd is None: + SUPPORTED_DECODERS.pop("zstd") # pragma: no cover diff --git a/myenv/lib/python3.12/site-packages/httpx/_exceptions.py b/myenv/lib/python3.12/site-packages/httpx/_exceptions.py new file mode 100644 index 0000000..77f45a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_exceptions.py @@ -0,0 +1,379 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" + +from __future__ import annotations + +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + +__all__ = [ + "CloseError", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "DecodingError", + "HTTPError", + "HTTPStatusError", + "InvalidURL", + "LocalProtocolError", + "NetworkError", + "PoolTimeout", + "ProtocolError", + "ProxyError", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "RequestError", + "RequestNotRead", + "ResponseNotRead", + "StreamClosed", + "StreamConsumed", + "StreamError", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "WriteError", + "WriteTimeout", +] + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: Request | None = None + + @property + def request(self) -> Request: + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: Request) -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__(self, message: str, *, request: Request | None = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. 
+ # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__(self, message: str, *, request: Request, response: Response) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. 
+
+    The developer made an error in accessing the request stream in
+    an invalid way.
+    """
+
+    def __init__(self, message: str) -> None:
+        super().__init__(message)
+
+
+class StreamConsumed(StreamError):
+    """
+    Attempted to read or stream content, but the content has already
+    been streamed.
+    """
+
+    def __init__(self) -> None:
+        message = (
+            "Attempted to read or stream some content, but the content has "
+            "already been streamed. For requests, this could be due to passing "
+            "a generator as request content, and then receiving a redirect "
+            "response or a secondary request as part of an authentication flow. "
+            "For responses, this could be due to attempting to stream the response "
+            "content more than once."
+        )
+        super().__init__(message)
+
+
+class StreamClosed(StreamError):
+    """
+    Attempted to read or stream response content, but the request has been
+    closed.
+    """
+
+    def __init__(self) -> None:
+        message = (
+            "Attempted to read or stream content, but the stream has been closed."
+        )
+        super().__init__(message)
+
+
+class ResponseNotRead(StreamError):
+    """
+    Attempted to access streaming response content, without having called `read()`.
+    """
+
+    def __init__(self) -> None:
+        message = (
+            "Attempted to access streaming response content,"
+            " without having called `read()`."
+        )
+        super().__init__(message)
+
+
+class RequestNotRead(StreamError):
+    """
+    Attempted to access streaming request content, without having called `read()`.
+    """
+
+    def __init__(self) -> None:
+        message = (
+            "Attempted to access streaming request content,"
+            " without having called `read()`."
+        )
+        super().__init__(message)
+
+
+@contextlib.contextmanager
+def request_context(
+    request: Request | None = None,
+) -> typing.Iterator[None]:
+    """
+    A context manager that can be used to attach the given request context
+    to any `RequestError` exceptions that are raised within the block.
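+
+    Illustrative sketch:
+
+        with request_context(request=request):
+            ...  # any RequestError raised here gains a `.request` attribute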
+    """
+    try:
+        yield
+    except RequestError as exc:
+        if request is not None:
+            exc.request = request
+        raise exc
diff --git a/myenv/lib/python3.12/site-packages/httpx/_main.py b/myenv/lib/python3.12/site-packages/httpx/_main.py
new file mode 100644
index 0000000..72657f8
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpx/_main.py
@@ -0,0 +1,509 @@
+from __future__ import annotations
+
+import functools
+import json
+import sys
+import typing
+
+import click
+import httpcore
+import pygments.lexers
+import pygments.util
+import rich.console
+import rich.markup
+import rich.progress
+import rich.syntax
+import rich.table
+
+from ._client import Client
+from ._exceptions import RequestError
+from ._models import Response
+from ._status_codes import codes
+
+
+def print_help() -> None:
+    console = rich.console.Console()
+
+    console.print("[bold]HTTPX :butterfly:", justify="center")
+    console.print()
+    console.print("A next generation HTTP client.", justify="center")
+    console.print()
+    console.print(
+        "Usage: [bold]httpx[/bold] [cyan]<URL> [OPTIONS][/cyan] ", justify="left"
+    )
+    console.print()
+
+    table = rich.table.Table.grid(padding=1, pad_edge=True)
+    table.add_column("Parameter", no_wrap=True, justify="left", style="bold")
+    table.add_column("Description")
+    table.add_row(
+        "-m, --method [cyan]METHOD",
+        "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n"
+        "[Default: GET, or POST if a request body is included]",
+    )
+    table.add_row(
+        "-p, --params [cyan]<NAME VALUE> ...",
+        "Query parameters to include in the request URL.",
+    )
+    table.add_row(
+        "-c, --content [cyan]TEXT", "Byte content to include in the request body."
+    )
+    table.add_row(
+        "-d, --data [cyan]<NAME VALUE> ...", "Form data to include in the request body."
+    )
+    table.add_row(
+        "-f, --files [cyan]<NAME FILENAME> ...",
+        "Form files to include in the request body.",
+    )
+    table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.")
+    table.add_row(
+        "-h, --headers [cyan]<NAME VALUE> ...",
+        "Include additional HTTP headers in the request.",
+    )
+    table.add_row(
+        "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request."
+    )
+    table.add_row(
+        "--auth [cyan]<USER PASS>",
+        "Username and password to include in the request. Specify '-' for the password"
+        " to use a password prompt. Note that using --verbose/-v will expose"
+        " the Authorization header, including the password encoding"
+        " in a trivially reversible format.",
+    )
+
+    table.add_row(
+        "--proxy [cyan]URL",
+        "Send the request via a proxy. Should be the URL giving the proxy address.",
+    )
+
+    table.add_row(
+        "--timeout [cyan]FLOAT",
+        "Timeout value to use for network operations, such as establishing the"
+        " connection, reading some data, etc... [Default: 5.0]",
+    )
+
+    table.add_row("--follow-redirects", "Automatically follow redirects.")
+    table.add_row("--no-verify", "Disable SSL verification.")
+    table.add_row(
+        "--http2", "Send the request using HTTP/2, if the remote server supports it."
+    )
+
+    table.add_row(
+        "--download [cyan]FILE",
+        "Save the response content as a file, rather than displaying it.",
+    )
+
+    table.add_row("-v, --verbose", "Verbose output. 
Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: bytes | None, + headers: list[tuple[bytes, bytes]], +) -> None: + console = rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] 
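+# Editorial note: these aliases mirror the shape of ssl.SSLSocket.getpeercert()
+# results; e.g. a 'subject' value like ((("commonName", "example.com"),),)
+# is a _PCTRTTT.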
+_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: click.Option | click.Parameter, + 
value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxy", + "proxy", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... [Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. 
Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: list[tuple[str, str]], + content: str, + data: list[tuple[str, str]], + files: list[tuple[str, click.File]], + json: str, + headers: list[tuple[str, str]], + cookies: list[tuple[str, str]], + auth: tuple[str, str] | None, + proxy: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.BinaryIO | None, + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client( + proxy=proxy, + timeout=timeout, + verify=verify, + http2=http2, + ) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/myenv/lib/python3.12/site-packages/httpx/_models.py b/myenv/lib/python3.12/site-packages/httpx/_models.py new file mode 100644 index 0000000..01d9583 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_models.py @@ -0,0 +1,1211 @@ +from __future__ import annotations + +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, + SyncByteStream, +) +from ._urls import URL +from ._utils import ( + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_content_type_charset, + parse_header_links, +) + +__all__ = ["Cookies", "Headers", "Request", "Response"] + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. 
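+
+    Usage sketch (illustrative):
+
+    ```
+    headers = Headers({"Accept": "application/json"})
+    headers["ACCEPT"]           # "application/json" (lookups ignore case)
+    headers.get_list("accept")  # ["application/json"]
+    ```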
+ """ + + def __init__( + self, + headers: HeaderTypes | None = None, + encoding: str | None = None, + ) -> None: + if headers is None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + elif isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers.items() + ] + else: + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers + ] + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. + """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> list[tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. + """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> list[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. 
+ """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> Headers: + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: str | bytes, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + stream: SyncByteStream | AsyncByteStream | None = None, + extensions: RequestExtensions | None = None, + ) -> None: + self.method = ( + method.decode("ascii").upper() + if isinstance(method, bytes) + else method.upper() + ) + self.url = URL(url) + if params is not None: + self.url = self.url.copy_merge_params(params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else extensions + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: str | None = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* + # auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. 
+ if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: list[tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes | None = None, + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: typing.Any = None, + stream: SyncByteStream | AsyncByteStream | None = None, + request: Request | None = None, + extensions: ResponseExtensions | None = None, + history: list[Response] | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + self.status_code = status_code + self.headers = Headers(headers) + + self._request: Request | None = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: Request | None = None + + self.extensions: ResponseExtensions = {} if extensions is None else extensions + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + self.default_encoding = default_encoding + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. 
+ self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. + # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." + ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + http_version: bytes = self.extensions["http_version"] + except KeyError: + return "HTTP/1.1" + else: + return http_version.decode("ascii", errors="ignore") + + @property + def reason_phrase(self) -> str: + try: + reason_phrase: bytes = self.extensions["reason_phrase"] + except KeyError: + return codes.get_reason_phrase(self.status_code) + else: + return reason_phrase.decode("ascii", errors="ignore") + + @property + def url(self) -> URL: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> str | None: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. 
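+
+        For example (an illustrative sketch, assuming the third-party
+        `charset_normalizer` package is installed for autodetection):
+
+            import charset_normalizer
+
+            def autodetect(content: bytes) -> str:
+                match = charset_normalizer.from_bytes(content).best()
+                return match.encoding if match else "utf-8"
+
+            response = Response(200, content=b"...", default_encoding=autodetect)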
+ """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + """ + Set the encoding to use for decoding the byte content into text. + + If the `text` attribute has been accessed, attempting to set the + encoding will throw a ValueError. + """ + if hasattr(self, "_text"): + raise ValueError( + "Setting encoding after `text` has been accessed is not allowed." + ) + self._encoding = value + + @property + def charset_encoding(self) -> str | None: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: list[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. + """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 
301, but retain method)
+                codes.PERMANENT_REDIRECT,
+            )
+            and "Location" in self.headers
+        )
+
+    def raise_for_status(self) -> Response:
+        """
+        Raise the `HTTPStatusError` if one occurred.
+        """
+        request = self._request
+        if request is None:
+            raise RuntimeError(
+                "Cannot call `raise_for_status` as the request "
+                "instance has not been set on this response."
+            )
+
+        if self.is_success:
+            return self
+
+        if self.has_redirect_location:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "Redirect location: '{0.headers[location]}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+        else:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+
+        status_class = self.status_code // 100
+        error_types = {
+            1: "Informational response",
+            3: "Redirect response",
+            4: "Client error",
+            5: "Server error",
+        }
+        error_type = error_types.get(status_class, "Invalid status code")
+        message = message.format(self, error_type=error_type)
+        raise HTTPStatusError(message, request=request, response=self)
+
+    def json(self, **kwargs: typing.Any) -> typing.Any:
+        return jsonlib.loads(self.content, **kwargs)
+
+    @property
+    def cookies(self) -> Cookies:
+        if not hasattr(self, "_cookies"):
+            self._cookies = Cookies()
+            self._cookies.extract_cookies(self)
+        return self._cookies
+
+    @property
+    def links(self) -> dict[str | None, dict[str, str]]:
+        """
+        Returns the parsed header links of the response, if any.
+        """
+        header = self.headers.get("link")
+        if header is None:
+            return {}
+
+        return {
+            (link.get("rel") or link.get("url")): link
+            for link in parse_header_links(header)
+        }
+
+    @property
+    def num_bytes_downloaded(self) -> int:
+        return self._num_bytes_downloaded
+
+    def __repr__(self) -> str:
+        return f"<Response [{self.status_code} {self.reason_phrase}]>"
+
+    def __getstate__(self) -> dict[str, typing.Any]:
+        return {
+            name: value
+            for name, value in self.__dict__.items()
+            if name not in ["extensions", "stream", "is_closed", "_decoder"]
+        }
+
+    def __setstate__(self, state: dict[str, typing.Any]) -> None:
+        for name, value in state.items():
+            setattr(self, name, value)
+        self.is_closed = True
+        self.extensions = {}
+        self.stream = UnattachedStream()
+
+    def read(self) -> bytes:
+        """
+        Read and return the response content.
+        """
+        if not hasattr(self, "_content"):
+            self._content = b"".join(self.iter_bytes())
+        return self._content
+
+    def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
+        """
+        A byte-iterator over the decoded response content.
+        This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
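+
+        For example (a sketch; `client` is assumed to be an `httpx.Client`,
+        and `handle` is a placeholder):
+
+            with client.stream("GET", "https://www.example.com") as response:
+                for chunk in response.iter_bytes(chunk_size=4096):
+                    handle(chunk)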
+ """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, brotli, and zstd encoded responses. 
+ """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: CookieTypes | None = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. 
+ """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. + """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: str | None = None, + domain: str | None = None, + path: str | None = None, + ) -> str | None: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: str | None = None, + path: str | None = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear(self, domain: str | None = None, path: str | None = None) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. 
+ """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, response: Response) -> None: + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/myenv/lib/python3.12/site-packages/httpx/_multipart.py b/myenv/lib/python3.12/site-packages/httpx/_multipart.py new file mode 100644 index 0000000..8edb622 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_multipart.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +import io +import os +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +def get_multipart_boundary_from_content_type( + content_type: bytes | None, +) -> bytes | None: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__(self, name: str, value: str | bytes | int | float | None) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. 
Expected str, got {type(name)}: {name!r}"
+            )
+        if value is not None and not isinstance(value, (str, bytes, int, float)):
+            raise TypeError(
+                "Invalid type for value. Expected primitive type,"
+                f" got {type(value)}: {value!r}"
+            )
+        self.name = name
+        self.value: str | bytes = (
+            value if isinstance(value, bytes) else primitive_value_to_str(value)
+        )
+
+    def render_headers(self) -> bytes:
+        if not hasattr(self, "_headers"):
+            name = format_form_param("name", self.name)
+            self._headers = b"".join(
+                [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"]
+            )
+
+        return self._headers
+
+    def render_data(self) -> bytes:
+        if not hasattr(self, "_data"):
+            self._data = to_bytes(self.value)
+
+        return self._data
+
+    def get_length(self) -> int:
+        headers = self.render_headers()
+        data = self.render_data()
+        return len(headers) + len(data)
+
+    def render(self) -> typing.Iterator[bytes]:
+        yield self.render_headers()
+        yield self.render_data()
+
+
+class FileField:
+    """
+    A single file field item, within a multipart form field.
+    """
+
+    CHUNK_SIZE = 64 * 1024
+
+    def __init__(self, name: str, value: FileTypes) -> None:
+        self.name = name
+
+        fileobj: FileContent
+
+        headers: dict[str, str] = {}
+        content_type: str | None = None
+
+        # This large tuple based API largely mirrors requests' API.
+        # It would be good to think of better APIs for this that we could
+        # include in httpx 2.0, since variable length tuples (especially of
+        # 4 elements) are quite unwieldy.
+        if isinstance(value, tuple):
+            if len(value) == 2:
+                # neither the 3rd parameter (content_type) nor the 4th (headers)
+                # was included
+                filename, fileobj = value
+            elif len(value) == 3:
+                filename, fileobj, content_type = value
+            else:
+                # all 4 parameters included
+                filename, fileobj, content_type, headers = value  # type: ignore
+        else:
+            filename = Path(str(getattr(value, "name", "upload"))).name
+            fileobj = value
+
+        if content_type is None:
+            content_type = guess_content_type(filename)
+
+        has_content_type_header = any("content-type" in key.lower() for key in headers)
+        if content_type is not None and not has_content_type_header:
+            # note that unlike requests, we ignore the content_type provided in
+            # the 3rd tuple element if it is also included in the headers;
+            # requests does the opposite (it overwrites the header with the 3rd
+            # tuple element)
+            headers["Content-Type"] = content_type
+
+        if isinstance(fileobj, io.StringIO):
+            raise TypeError(
+                "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'."
+            )
+        if isinstance(fileobj, io.TextIOBase):
+            raise TypeError(
+                "Multipart file uploads must be opened in binary mode, not text mode."
+            )
+
+        self.filename = filename
+        self.file = fileobj
+        self.headers = headers
+
+    def get_length(self) -> int | None:
+        headers = self.render_headers()
+
+        if isinstance(self.file, (str, bytes)):
+            return len(headers) + len(to_bytes(self.file))
+
+        file_length = peek_filelike_length(self.file)
+
+        # If we can't determine the filesize without reading it into memory,
+        # then return `None` here, to indicate an unknown file length. 
+ if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: bytes | None = None, + ) -> None: + if boundary is None: + boundary = os.urandom(16).hex().encode("ascii") + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[FileField | DataField]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> int | None: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. 
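+    # A single MultipartStream instance satisfies both the sync and async
+    # byte-stream interfaces: `iter_chunks()` performs no awaiting of its own,
+    # so `__aiter__` below can delegate to the same synchronous generator.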
+ + def get_headers(self) -> dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/myenv/lib/python3.12/site-packages/httpx/_status_codes.py b/myenv/lib/python3.12/site-packages/httpx/_status_codes.py new file mode 100644 index 0000000..133a623 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_status_codes.py @@ -0,0 +1,162 @@ +from __future__ import annotations + +from enum import IntEnum + +__all__ = ["codes"] + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> codes: + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
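+
+        For example, `codes.is_error(503)` is `True`, while
+        `codes.is_error(302)` is `False`.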
+ """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/myenv/lib/python3.12/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000..7a32105 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_transports/__init__.py @@ -0,0 +1,15 @@ +from .asgi import * +from .base import * +from .default import * +from .mock import * +from .wsgi import * + +__all__ = [ + "ASGITransport", + "AsyncBaseTransport", + "BaseTransport", + "AsyncHTTPTransport", + "HTTPTransport", + "MockTransport", + "WSGITransport", +] diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3987430 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc new file mode 100644 index 0000000..0009da0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..08fd92a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc new file mode 100644 index 0000000..703a168 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000..dedb05d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc new file mode 100644 index 0000000..d709f05 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/myenv/lib/python3.12/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000..8578d4a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +import typing + +import sniffio + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.MutableMapping[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.MutableMapping[str, typing.Any]], 
typing.Awaitable[None]
+]
+_ASGIApp = typing.Callable[
+    [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
+]
+
+__all__ = ["ASGITransport"]
+
+
+def create_event() -> Event:
+    if sniffio.current_async_library() == "trio":
+        import trio
+
+        return trio.Event()
+    else:
+        import asyncio
+
+        return asyncio.Event()
+
+
+class ASGIResponseStream(AsyncByteStream):
+    def __init__(self, body: list[bytes]) -> None:
+        self._body = body
+
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+        yield b"".join(self._body)
+
+
+class ASGITransport(AsyncBaseTransport):
+    """
+    A custom AsyncTransport that handles sending requests directly to an ASGI app.
+
+    ```python
+    transport = httpx.ASGITransport(
+        app=app,
+        root_path="/submount",
+        client=("1.2.3.4", 123)
+    )
+    client = httpx.AsyncClient(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The ASGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+      should be raised. Defaults to `True`. Can be set to `False` for use cases
+      such as testing the content of a client 500 response.
+    * `root_path` - The root path on which the ASGI application should be mounted.
+    * `client` - A two-tuple indicating the client IP and port of incoming requests.
+    """
+
+    def __init__(
+        self,
+        app: _ASGIApp,
+        raise_app_exceptions: bool = True,
+        root_path: str = "",
+        client: tuple[str, int] = ("127.0.0.1", 123),
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.root_path = root_path
+        self.client = client
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        assert isinstance(request.stream, AsyncByteStream)
+
+        # ASGI scope.
+        scope = {
+            "type": "http",
+            "asgi": {"version": "3.0"},
+            "http_version": "1.1",
+            "method": request.method,
+            "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
+            "scheme": request.url.scheme,
+            "path": request.url.path,
+            "raw_path": request.url.raw_path.split(b"?")[0],
+            "query_string": request.url.query,
+            "server": (request.url.host, request.url.port),
+            "client": self.client,
+            "root_path": self.root_path,
+        }
+
+        # Request.
+        request_body_chunks = request.stream.__aiter__()
+        request_complete = False
+
+        # Response.
+        status_code = None
+        response_headers = None
+        body_parts = []
+        response_started = False
+        response_complete = create_event()
+
+        # ASGI callables. 
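+        # `receive` feeds the request body into the app and then signals
+        # disconnect once the response has completed; `send` collects the
+        # status, headers, and body parts that the app emits.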
+
+        async def receive() -> dict[str, typing.Any]:
+            nonlocal request_complete
+
+            if request_complete:
+                await response_complete.wait()
+                return {"type": "http.disconnect"}
+
+            try:
+                body = await request_body_chunks.__anext__()
+            except StopAsyncIteration:
+                request_complete = True
+                return {"type": "http.request", "body": b"", "more_body": False}
+            return {"type": "http.request", "body": body, "more_body": True}
+
+        async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
+            nonlocal status_code, response_headers, response_started
+
+            if message["type"] == "http.response.start":
+                assert not response_started
+
+                status_code = message["status"]
+                response_headers = message.get("headers", [])
+                response_started = True
+
+            elif message["type"] == "http.response.body":
+                assert not response_complete.is_set()
+                body = message.get("body", b"")
+                more_body = message.get("more_body", False)
+
+                if body and request.method != "HEAD":
+                    body_parts.append(body)
+
+                if not more_body:
+                    response_complete.set()
+
+        try:
+            await self.app(scope, receive, send)
+        except Exception:  # noqa: PIE-786
+            if self.raise_app_exceptions:
+                raise
+
+            response_complete.set()
+            if status_code is None:
+                status_code = 500
+            if response_headers is None:
+                response_headers = {}
+
+        assert response_complete.is_set()
+        assert status_code is not None
+        assert response_headers is not None
+
+        stream = ASGIResponseStream(body_parts)
+
+        return Response(status_code, headers=response_headers, stream=stream)
diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/base.py b/myenv/lib/python3.12/site-packages/httpx/_transports/base.py
new file mode 100644
index 0000000..66fd99d
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/httpx/_transports/base.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import typing
+from types import TracebackType
+
+from .._models import Request, Response
+
+T = typing.TypeVar("T", bound="BaseTransport")
+A = typing.TypeVar("A", bound="AsyncBaseTransport")
+
+__all__ = ["AsyncBaseTransport", "BaseTransport"]
+
+
+class BaseTransport:
+    def __enter__(self: T) -> T:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
+    ) -> None:
+        self.close()
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send a single HTTP request and return a response.
+
+        Developers shouldn't typically ever need to call into this API directly,
+        since the Client class provides all the higher level user-facing API
+        niceties.
+
+        In order to properly release any network resources, the response
+        stream should *either* be consumed immediately, with a call to
+        `response.stream.read()`, or else the `handle_request` call should
+        be followed with a try/finally block to ensure the stream is
+        always closed.
+
+        Example usage:
+
+            with httpx.HTTPTransport() as transport:
+                req = httpx.Request(
+                    method=b"GET",
+                    url=(b"https", b"www.example.com", 443, b"/"),
+                    headers=[(b"Host", b"www.example.com")],
+                )
+                resp = transport.handle_request(req)
+                body = resp.stream.read()
+                print(resp.status_code, resp.headers, body)
+
+        Takes a `Request` instance as the only argument.
+
+        Returns a `Response` instance.
+        """
+        raise NotImplementedError(
+            "The 'handle_request' method must be implemented." 
+ ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/default.py b/myenv/lib/python3.12/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000..33db416 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_transports/default.py @@ -0,0 +1,389 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" + +from __future__ import annotations + +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream, VerifyTypes +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + +__all__ = ["AsyncHTTPTransport", "HTTPTransport"] + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
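+            # Keep `to_exc` only if it is a subclass of (i.e. at least as
+            # specific as) the best candidate found so far.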
+ if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', or 'socks5'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. + self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + proxy_ssl_context=proxy.ssl_context, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + 
keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', or 'socks5'," + f" but got {proxy.url.scheme!r}." + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. + await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = await self._pool.handle_async_request(req) + + assert isinstance(resp.stream, typing.AsyncIterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/mock.py b/myenv/lib/python3.12/site-packages/httpx/_transports/mock.py new file mode 100644 index 0000000..8c418f5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_transports/mock.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] + + +__all__ = ["MockTransport"] + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: SyncHandler | AsyncHandler) -> None: + self.handler = handler + + def handle_request( + self, + request: Request, + ) -> Response: + request.read() + response = self.handler(request) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response + + async def handle_async_request( + self, + request: Request, + ) -> Response: + await request.aread() + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable needs to be awaited to actually + # return the result.
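+ # For example (illustrative), a handler passed to MockTransport might be: + # + # async def handler(request): + # return httpx.Response(200, json={"text": "Hello, world!"}) + # + # in which case `self.handler(request)` returns a coroutine, not a Response.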
+ + if not isinstance(response, Response): + response = await response + + return response diff --git a/myenv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/myenv/lib/python3.12/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 0000000..8592ffe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +__all__ = ["WSGITransport"] + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to a WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can set up the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Defaults to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests.
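+ + For example, wiring the transport to a minimal WSGI app and making an + in-process request (the `simple_app` callable here is an illustrative + placeholder, not part of httpx; any host name works, since requests + never leave the process): + + ``` + def simple_app(environ, start_response): + start_response("200 OK", [("Content-Type", "text/plain")]) + return [b"Hello, World!"] + + transport = httpx.WSGITransport(app=simple_app) + client = httpx.Client(transport=transport) + response = client.get("http://testserver/") + assert response.status_code == 200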
+ ``` + """ + + def __init__( + self, + app: WSGIApplication, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.TextIO | None = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/myenv/lib/python3.12/site-packages/httpx/_types.py b/myenv/lib/python3.12/site-packages/httpx/_types.py new file mode 100644 index 0000000..661af26 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_types.py @@ -0,0 +1,136 @@ +""" +Type definitions for type checking purposes. 
+""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = NamedTuple( + "RawURL", + [ + ("raw_scheme", bytes), + ("raw_host", bytes), + ("port", Optional[int]), + ("raw_path", bytes), + ], +) + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxyTypes = Union["URL", str, "Proxy"] +ProxiesTypes = Union[ProxyTypes, Dict[Union["URL", str], Union[None, ProxyTypes]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = MutableMapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = MutableMapping[str, Any] + +__all__ = ["AsyncByteStream", "SyncByteStream"] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." 
+ ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/httpx/_urlparse.py b/myenv/lib/python3.12/site-packages/httpx/_urlparse.py new file mode 100644 index 0000000..479c2ef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_urlparse.py @@ -0,0 +1,505 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. +* The API supports passing individual components, as well as the complete URL string. + +Previously we relied on the excellent `rfc3986` package to handle URL parsing and +validation, but this module provides a simpler alternative, with less indirection +required. +""" + +from __future__ import annotations + +import ipaddress +import re +import typing + +import idna + +from ._exceptions import InvalidURL + +MAX_URL_LENGTH = 65536 + +# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 +UNRESERVED_CHARACTERS = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" +) +SUB_DELIMS = "!$&'()*+,;=" + +PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") + + +# {scheme}: (optional) +# //{authority} (optional) +# {path} +# ?{query} (optional) +# #{fragment} (optional) +URL_REGEX = re.compile( + ( + r"(?:(?P<scheme>{scheme}):)?" + r"(?://(?P<authority>{authority}))?" + r"(?P<path>{path})" + r"(?:\?(?P<query>{query}))?" + r"(?:#(?P<fragment>{fragment}))?" + ).format( + scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", + authority="[^/?#]*", + path="[^?#]*", + query="[^#]*", + fragment=".*", + ) +) + +# {userinfo}@ (optional) +# {host} +# :{port} (optional) +AUTHORITY_REGEX = re.compile( + ( + r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?" + ).format( + userinfo=".*", # Any character sequence. + host="(\\[.*\\]|[^:@]*)", # Either any character sequence excluding ':' or '@', + # or an IPv6 address enclosed within square brackets. + port=".*", # Any character sequence. + ) +) + + +# If we call urlparse with an individual component, then we need to regex +# validate that component individually. +# Note that we're duplicating the same strings as above. Shock! Horror!! +COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexes as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation.
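+ # For example (illustrative): "300.0.0.1" matches the IPv4-style pattern below, + # but is then rejected by `ipaddress.IPv4Address` in `encode_host()`, raising + # InvalidURL rather than silently treating it as a DNS hostname.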
+IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: int | None + path: str + query: str | None + fragment: str | None + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: str | None) -> ParseResult: + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in url): + char = next(char for char in url if char.isascii() and not char.isprintable()) + idx = url.find(char) + error = ( + f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Some keyword arguments require special handling. + # ------------------------------------------------ + + # Coerce "port" to a string, if it is provided as an integer. + if "port" in kwargs: + port = kwargs["port"] + kwargs["port"] = str(port) if isinstance(port, int) else port + + # Replace "netloc" with "host and "port". + if "netloc" in kwargs: + netloc = kwargs.pop("netloc") or "" + kwargs["host"], _, kwargs["port"] = netloc.partition(":") + + # Replace "username" and/or "password" with "userinfo". + if "username" in kwargs or "password" in kwargs: + username = quote(kwargs.pop("username", "") or "") + password = quote(kwargs.pop("password", "") or "") + kwargs["userinfo"] = f"{username}:{password}" if password else username + + # Replace "raw_path" with "path" and "query". + if "raw_path" in kwargs: + raw_path = kwargs.pop("raw_path") or "" + kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?") + if not seperator: + kwargs["query"] = None + + # Ensure that IPv6 "host" addresses are always escaped with "[...]". + if "host" in kwargs: + host = kwargs.get("host") or "" + if ":" in host and not (host.startswith("[") and host.endswith("]")): + kwargs["host"] = f"[{host}]" + + # If any keyword arguments are provided, ensure they are valid. 
+ # ------------------------------------------------------------- + + for key, value in kwargs.items(): + if value is not None: + if len(value) > MAX_URL_LENGTH: + raise InvalidURL(f"URL component '{key}' too long") + + # If a component includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in value): + char = next( + char for char in value if char.isascii() and not char.isprintable() + ) + idx = value.find(char) + error = ( + f"Invalid non-printable ASCII character in URL {key} component, " + f"{char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. + # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + fragment = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") + parsed_host: str = encode_host(host) + parsed_port: int | None = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_scheme or has_authority: + path = normalize_path(path) + + # The GEN_DELIMS set is... : / ? # [ ] @ + # These do not need to be percent-quoted unless they serve as delimiters for the + # specific component. + WHATWG_SAFE = '`{}%|^\\"' + + # For 'path' we need to drop ? and # from the GEN_DELIMS set. + parsed_path: str = quote(path, safe=SUB_DELIMS + WHATWG_SAFE + ":/[]@") + # For 'query' we need to drop '#' from the GEN_DELIMS set. + parsed_query: str | None = ( + None + if query is None + else quote(query, safe=SUB_DELIMS + WHATWG_SAFE + ":/?[]@") + ) + # For 'fragment' we can include all of the GEN_DELIMS set. + parsed_fragment: str | None = ( + None + if fragment is None + else quote(fragment, safe=SUB_DELIMS + WHATWG_SAFE + ":/?#[]@") + ) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. 
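+ # For example (illustrative): + # + # urlparse("https://EXAMPLE.com:443/a%20path") + # == ParseResult("https", "", "example.com", None, "/a%20path", None, None) + # + # with the scheme lowercased, the default port dropped, and existing + # percent-escapes preserved.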
+ return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_fragment, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + WHATWG_SAFE = '"`{}%|\\' + return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port(port: str | int | None, scheme: str) -> int | None: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains + a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # If a URI contains an authority component, then the path component + # must either be empty or begin with a slash ("/") character." + if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + + if not has_scheme and not has_authority: + # If a URI does not contain an authority component, then the path cannot begin + # with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL("Relative URLs cannot have a path starting with '//'") + + # In addition, a URI reference (Section 4.1) may be a relative-path reference, + # in which case the first path segment cannot contain a colon (":") character. 
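+ # For example (illustrative): a relative reference like "local:path" is parsed + # by URL_REGEX as scheme "local" with path "path", which is exactly why RFC 3986 + # forbids a colon in the first segment of a relative path. The check below + # covers the remaining case, where the path itself begins with ":".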
+ if path.startswith(":"): + raise InvalidURL("Relative URLs cannot have a path starting with ':'") + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. + + For example: + + normalize_path("/path/./to/somewhere/..") == "/path/to" + """ + # Fast return when no '.' characters in the path. + if "." not in path: + return path + + components = path.split("/") + + # Fast return when no '.' or '..' components in the path. + if "." not in components and ".." not in components: + return path + + # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 + output: list[str] = [] + for component in components: + if component == ".": + pass + elif component == "..": + if output and output != [""]: + output.pop() + else: + output.append(component) + return "/".join(output) + + +def PERCENT(string: str) -> str: + return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")]) + + +def percent_encoded(string: str, safe: str = "/") -> str: + """ + Use percent-encoding to quote a string. + """ + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + + # Fast path for strings that don't need escaping. + if not string.rstrip(NON_ESCAPED_CHARS): + return string + + return "".join( + [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string] + ) + + +def quote(string: str, safe: str = "/") -> str: + """ + Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. + + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 + + * `string`: The string to be percent-escaped. + * `safe`: A string containing characters that may be treated as safe, and do not + need to be escaped. Unreserved characters are always treated as safe. + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 + """ + parts = [] + current_position = 0 + for match in re.finditer(PERCENT_ENCODED_REGEX, string): + start_position, end_position = match.start(), match.end() + matched_text = match.group(0) + # Add any text up to the '%xx' escape sequence. + if start_position != current_position: + leading_text = string[current_position:start_position] + parts.append(percent_encoded(leading_text, safe=safe)) + + # Add the '%xx' escape sequence. + parts.append(matched_text) + current_position = end_position + + # Add any text after the final '%xx' escape sequence. + if current_position != len(string): + trailing_text = string[current_position:] + parts.append(percent_encoded(trailing_text, safe=safe)) + + return "".join(parts) + + +def urlencode(items: list[tuple[str, str]]) -> str: + """ + We can use a much simpler version of the stdlib urlencode here because + we don't need to handle a bunch of different typing cases, such as bytes vs str. + + https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926 + + Note that we use '%20' encoding for spaces, and '%2F' for '/'. + This is slightly different from `requests`, but is the behaviour that browsers use.
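+ + For example (illustrative): urlencode([("a b", "1/2")]) == "a%20b=1%2F2"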
+ + See + - https://github.com/encode/httpx/issues/2536 + - https://github.com/encode/httpx/issues/2721 + - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + """ + return "&".join( + [ + percent_encoded(k, safe="") + "=" + percent_encoded(v, safe="") + for k, v in items + ] + ) diff --git a/myenv/lib/python3.12/site-packages/httpx/_urls.py b/myenv/lib/python3.12/site-packages/httpx/_urls.py new file mode 100644 index 0000000..ec4ea6b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_urls.py @@ -0,0 +1,648 @@ +from __future__ import annotations + +import typing +from urllib.parse import parse_qs, unquote + +import idna + +from ._types import QueryParamTypes, RawURL +from ._urlparse import urlencode, urlparse +from ._utils import primitive_value_to_str + +__all__ = ["URL", "QueryParams"] + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port + normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work + with `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can + only be properly URL escaped when decoding the parameter names and values + themselves. 
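+ + For example, query parameters survive a parse/decode round-trip (illustrative): + + url = httpx.URL("https://example.com/path?k=a%20b") + assert url.query == b"k=a%20b" + assert url.params["k"] == "a b"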
+ """ + + def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. + for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + "Invalid type for url. Expected str or httpx.URL," + f" got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. 
+ + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + return self._uri_reference.host.encode("ascii") + + @property + def port(self) -> int | None: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. + Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. + + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + return self._uri_reference.port + + @property + def netloc(self) -> bytes: + """ + Either `<host>` or `<host>:<port>` as bytes. + Always normalized to lowercase, and IDNA encoded. + + This property may be used for generating the value of a request + "Host" header. + """ + return self._uri_reference.netloc.encode("ascii") + + @property + def path(self) -> str: + """ + The URL path as a string, excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is necessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> QueryParams: + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" + self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragment, as used in HTML anchors. + As a string, without the leading '#'. + """ + return unquote(self._uri_reference.fragment or "") + + @property + def raw(self) -> RawURL: + """ + Provides the (scheme, host, port, target) for the outgoing request. + + In older versions of `httpx` this was used in the low-level transport API. + We no longer use `RawURL`, and this property will be deprecated + in a future release. + """ + return RawURL( + self.raw_scheme, + self.raw_host, + self.port, + self.raw_path, + ) + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute.
+ # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> URL: + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class. + + For example: + + url = httpx.URL("https://www.example.com").copy_with( + username="jo@gmail.com", password="a secret" + ) + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + return URL(self, **kwargs) + + def copy_set_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> URL: + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> URL: + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URL | str) -> URL: + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + from urllib.parse import urljoin + + return URL(urljoin(str(self), str(URL(url)))) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return str(self._uri_reference) + + def __repr__(self) -> str: + scheme, userinfo, host, port, path, query, fragment = self._uri_reference + + if ":" in userinfo: + # Mask any password component. + userinfo = f'{userinfo.split(":")[0]}:[secure]' + + authority = "".join( + [ + f"{userinfo}@" if userinfo else "", + f"[{host}]" if ":" in host else host, + f":{port}" if port is not None else "", + ] + ) + url = "".join( + [ + f"{self.scheme}:" if scheme else "", + f"//{authority}" if authority else "", + path, + f"?{query}" if query is not None else "", + f"#{fragment}" if fragment is not None else "", + ] + ) + + return f"{self.__class__.__name__}({url!r})" + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
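+ # Accepted input shapes, for example (illustrative): + # QueryParams("a=123&a=456"), QueryParams({"a": "123"}), + # QueryParams([("a", "123"), ("a", "456")]), QueryParams(a="123")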
+ + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: dict[typing.Any, list[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: list[tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> list[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting the value of a key. 
+ + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting or appending the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> QueryParams: + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: QueryParamTypes | None = None) -> QueryParams: + """ + Return a new QueryParams instance, updated with. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + """ + Note that we use '%20' encoding for spaces, and treat '/' as a safe + character. + + See https://github.com/encode/httpx/issues/2536 and + https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode + """ + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: QueryParamTypes | None = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." 
+ ) diff --git a/myenv/lib/python3.12/site-packages/httpx/_utils.py b/myenv/lib/python3.12/site-packages/httpx/_utils.py new file mode 100644 index 0000000..a9ece19 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/httpx/_utils.py @@ -0,0 +1,440 @@ +from __future__ import annotations + +import codecs +import email.message +import ipaddress +import mimetypes +import os +import re +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: str | bytes, + lower: bool, + encoding: str | None = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: PrimitiveData) -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. + """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. 
+ """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +def get_ca_bundle_from_env() -> str | None: + if "SSL_CERT_FILE" in os.environ: + ssl_file = Path(os.environ["SSL_CERT_FILE"]) + if ssl_file.is_file(): + return str(ssl_file) + if "SSL_CERT_DIR" in os.environ: + ssl_path = Path(os.environ["SSL_CERT_DIR"]) + if ssl_path.is_dir(): + return str(ssl_path) + return None + + +def parse_header_links(value: str) -> list[dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: list[dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def parse_content_type_charset(content_type: str) -> str | None: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. + # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def obfuscate_sensitive_headers( + items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +def port_or_default(url: URL) -> int | None: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: URL, other: URL) -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def is_https_redirect(url: URL, location: URL) -> bool: + """ + Return 'True' if 'location' is an HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and port_or_default(url) == 80 + and location.scheme == "https" + and port_or_default(location) == 443 + ) + + +def get_environment_proxies() -> dict[str, str | None]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'.
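+ # For example (illustrative): with HTTP_PROXY="http://proxy:8080" and + # NO_PROXY="example.com" set in the environment, this returns + # {"http://": "http://proxy:8080", "all://*example.com": None}.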
+ proxy_info = getproxies() + mounts: dict[str, str | None] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if "://" in hostname: + mounts[hostname] = None + elif is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: str | bytes, encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def guess_content_type(filename: str | None) -> str | None: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def peek_filelike_length(stream: typing.Any) -> int | None: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... 
+            return None
+
+    return length
+
+
+class Timer:
+    async def _get_time(self) -> float:
+        library = sniffio.current_async_library()
+        if library == "trio":
+            import trio
+
+            return trio.current_time()
+        else:
+            import asyncio
+
+            return asyncio.get_event_loop().time()
+
+    def sync_start(self) -> None:
+        self.started = time.perf_counter()
+
+    async def async_start(self) -> None:
+        self.started = await self._get_time()
+
+    def sync_elapsed(self) -> float:
+        now = time.perf_counter()
+        return now - self.started
+
+    async def async_elapsed(self) -> float:
+        now = await self._get_time()
+        return now - self.started
+
+
+class URLPattern:
+    """
+    A utility class currently used for making lookups against proxy keys...
+
+    # Wildcard matching...
+    >>> pattern = URLPattern("all://")
+    >>> pattern.matches(httpx.URL("http://example.com"))
+    True
+
+    # With scheme matching...
+    >>> pattern = URLPattern("https://")
+    >>> pattern.matches(httpx.URL("https://example.com"))
+    True
+    >>> pattern.matches(httpx.URL("http://example.com"))
+    False
+
+    # With domain matching...
+    >>> pattern = URLPattern("https://example.com")
+    >>> pattern.matches(httpx.URL("https://example.com"))
+    True
+    >>> pattern.matches(httpx.URL("http://example.com"))
+    False
+    >>> pattern.matches(httpx.URL("https://other.com"))
+    False
+
+    # Wildcard scheme, with domain matching...
+    >>> pattern = URLPattern("all://example.com")
+    >>> pattern.matches(httpx.URL("https://example.com"))
+    True
+    >>> pattern.matches(httpx.URL("http://example.com"))
+    True
+    >>> pattern.matches(httpx.URL("https://other.com"))
+    False
+
+    # With port matching...
+    >>> pattern = URLPattern("https://example.com:1234")
+    >>> pattern.matches(httpx.URL("https://example.com:1234"))
+    True
+    >>> pattern.matches(httpx.URL("https://example.com"))
+    False
+    """
+
+    def __init__(self, pattern: str) -> None:
+        from ._urls import URL
+
+        if pattern and ":" not in pattern:
+            raise ValueError(
+                f"Proxy keys should use proper URL forms rather "
+                f"than plain scheme strings. "
+                f'Instead of "{pattern}", use "{pattern}://"'
+            )
+
+        url = URL(pattern)
+        self.pattern = pattern
+        self.scheme = "" if url.scheme == "all" else url.scheme
+        self.host = "" if url.host == "*" else url.host
+        self.port = url.port
+        if not url.host or url.host == "*":
+            self.host_regex: typing.Pattern[str] | None = None
+        elif url.host.startswith("*."):
+            # *.example.com should match "www.example.com", but not "example.com"
+            domain = re.escape(url.host[2:])
+            self.host_regex = re.compile(f"^.+\\.{domain}$")
+        elif url.host.startswith("*"):
+            # *example.com should match "www.example.com" and "example.com"
+            domain = re.escape(url.host[1:])
+            self.host_regex = re.compile(f"^(.+\\.)?{domain}$")
+        else:
+            # example.com should match "example.com" but not "www.example.com"
+            domain = re.escape(url.host)
+            self.host_regex = re.compile(f"^{domain}$")
+
+    def matches(self, other: URL) -> bool:
+        if self.scheme and self.scheme != other.scheme:
+            return False
+        if (
+            self.host
+            and self.host_regex is not None
+            and not self.host_regex.match(other.host)
+        ):
+            return False
+        if self.port is not None and self.port != other.port:
+            return False
+        return True
+
+    @property
+    def priority(self) -> tuple[int, int, int]:
+        """
+        The priority allows URLPattern instances to be sortable, so that
+        we can match from most specific to least specific.
+        """
+        # URLs with a port should take priority over URLs without a port.
+        port_priority = 0 if self.port is not None else 1
+        # Longer hostnames should match first.
+ host_priority = -len(self.host) + # Longer schemes should match first. + scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: URLPattern) -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/myenv/lib/python3.12/site-packages/httpx/py.typed b/myenv/lib/python3.12/site-packages/httpx/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/LICENSE.md b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/LICENSE.md new file mode 100644 index 0000000..19b6b45 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
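Before moving on to the idna package files: the URLPattern class in httpx/_utils.py above matches proxy keys most-specific-first by sorting on its priority tuple (keyed port first, then longer host, then longer scheme). A minimal sketch of that behaviour, assuming httpx is installed; note that httpx._utils is a private module, so importing URLPattern from it is for illustration only.

    import httpx
    from httpx._utils import URLPattern  # private module; illustrative import

    patterns = [URLPattern(key) for key in ("all://", "https://", "all://*.example.com")]

    # URLPattern defines __lt__ via its priority property, so sorting orders
    # the keys from most specific to least specific.
    for pattern in sorted(patterns):
        print(pattern.pattern, pattern.matches(httpx.URL("http://www.example.com")))

    # Expected output, per the doctests in the class docstring above:
    # all://*.example.com True
    # https:// False
    # all:// True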
diff --git a/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/METADATA b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/METADATA
new file mode 100644
index 0000000..a04a7f9
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/METADATA
@@ -0,0 +1,245 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 3.8
+Summary: Internationalized Domain Names in Applications (IDNA)
+Author-email: Kim Davies
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
+Project-URL: Issue tracker, https://github.com/kjd/idna/issues
+Project-URL: Source, https://github.com/kjd/idna
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalized Domain Names in
+Applications (IDNA) protocol as specified in `RFC 5891
+<https://tools.ietf.org/html/rfc5891>`_. This is the latest version of
+the protocol and is sometimes referred to as “IDNA 2008”.
+
+This library also provides support for Unicode Technical
+Standard 46, `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_.
+
+This acts as a suitable replacement for the “encodings.idna”
+module that comes with the Python standard library, but which
+only supports the older superseded IDNA specification (`RFC 3490
+<https://tools.ietf.org/html/rfc3490>`_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+
+Installation
+------------
+
+This package is available for installation from PyPI:
+
+.. code-block:: bash
+
+    $ python3 -m pip install idna
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to A-labels or U-labels
+respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print('домен.испытание'.encode('idna2008'))
+    b'xn--d1acufc.xn--80akhbyknj4f'
+    >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008'))
+    домен.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel('测试')
+    b'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the
+IDNA specification does not normalize input from different potential
+ways a user may input a domain name. This functionality, known as
+a “mapping”, is considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library provides one such mapping that was developed by the
+Unicode Consortium. Known as `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_, it provides for both a regular
+mapping for typical applications, as well as a transitional mapping to
+help migrate from older IDNA 2003 applications. Strings are
+preprocessed according to Section 4.4 “Preprocessing for IDNA2008”
+prior to the IDNA operations.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+    >>> idna.encode('Königsgäßchen', uts46=True)
+    b'xn--knigsgchen-b4a3dun'
+    >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+    königsgäßchen
+
+Transitional processing provides conversions to help transition from
+the older 2003 standard to the current standard. For example, in the
+original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was
+converted into two *LATIN SMALL LETTER S* (ss), whereas in the current
+IDNA specification this conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+    b'xn--knigsgsschen-lcb0w'
+
+Implementers should use transitional processing with caution, only in
+rare cases where conversion from legacy labels to current labels must be
+performed (i.e. IDNA implementations that pre-date 2008). For typical
+applications that just need to convert labels, transitional processing
+is unlikely to be beneficial and could produce unexpected incompatible
+results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification
+should raise an exception derived from the ``idna.IDNAError`` base
+class.
+
+More specific exceptions may be generated: ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied.)
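Since everything in the hierarchy above derives from ``idna.IDNAError``, a caller can usually catch the base class alone. A minimal sketch; the sample labels are illustrative:

    import idna

    for domain in ("ドメイン.テスト", "Königsgäßchen", "-hyphen.example"):
        try:
            print(idna.encode(domain))
        except idna.IDNAError as exc:
            # IDNABidiError, InvalidCodepoint and InvalidCodepointContext all
            # subclass IDNAError, so one handler covers every failure mode.
            print(f"{domain!r} rejected: {exc}")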
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+  ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+  UTS 46 conversions. Implementers who wish to track this library against
+  a different Unicode version may use this tool to manually generate a
+  different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generate a table of the IDNA disposition
+  (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+  B.1 of RFC 5892 and the pre-computed tables published by `IANA
+  <https://www.iana.org/assignments/idna-tables/>`_.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+  properties associated with an individual Unicode codepoint (in this
+  case, U+0061), that are used to assess the IDNA and UTS 46 status of a
+  codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data -h``.
+Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to be used in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+  `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.6 and higher.
+  As this library serves as a low-level toolkit for a variety of
+  applications, many of which strive for broad compatibility with older
+  Python versions, there is no rush to remove older interpreter support.
+  Removing support for older versions should be well justified in that the
+  maintenance burden has become too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+  Use "idna<3" in your requirements file if you need this library for
+  a Python 2 application. Be advised that these versions are no longer
+  actively developed.
+
+* **Testing**. The library has a test suite based on each rule of the
+  IDNA specification, as well as tests that are provided as part of the
+  Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+  <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+  this library. Encoding of symbols like emoji is expressly prohibited by
+  the technical standard IDNA 2008 and emoji domains are broadly phased
+  out across the domain industry due to associated security risks. For
+  now, applications that need to support these non-compliant labels
+  may wish to consider trying the encode/decode operation in this library
+  first, and then falling back to using ``encodings.idna``. See `the Github
+  project <https://github.com/kjd/idna>`_ for more discussion.
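The Emoji note above suggests attempting the IDNA 2008 conversion first and only then falling back to the legacy standard-library codec. A minimal sketch of that pattern; the helper name is hypothetical, and whether the fallback accepts a particular non-compliant label depends on the built-in codec's own IDNA 2003 rules:

    import idna

    def encode_lenient(domain: str) -> bytes:
        # encode_lenient is a hypothetical helper, not part of either library.
        try:
            return idna.encode(domain, uts46=True)
        except idna.IDNAError:
            # Fall back to the standard library's older IDNA 2003 codec,
            # which is more permissive than IDNA 2008 for some labels.
            return domain.encode("idna")

    print(encode_lenient("example.com"))  # b'example.com'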
+ diff --git a/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/RECORD b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/RECORD new file mode 100644 index 0000000..2faffc5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.8.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +idna-3.8.dist-info/METADATA,sha256=t8baHZrBTPkJi3Lr8ZHm0pbRKnelgO5AU7EGIeTvEcg,9948 +idna-3.8.dist-info/RECORD,, +idna-3.8.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-312.pyc,, +idna/__pycache__/codec.cpython-312.pyc,, +idna/__pycache__/compat.cpython-312.pyc,, +idna/__pycache__/core.cpython-312.pyc,, +idna/__pycache__/idnadata.cpython-312.pyc,, +idna/__pycache__/intranges.cpython-312.pyc,, +idna/__pycache__/package_data.cpython-312.pyc,, +idna/__pycache__/uts46data.cpython-312.pyc,, +idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=OHDXwDVbb3R1gNXjHw7JWeeE2rn2u3a-QV-KCeznYcA,12884 +idna/idnadata.py,sha256=dqRwytzkjIHMBa2R1lYvHDwACenZPt8eGVu1Y8UBE-E,78320 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=DogtAD5vs_-I2Q0k3_ZA4egUq2YLJ4pBbbhI8APzOcY,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503 diff --git a/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna-3.8.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/myenv/lib/python3.12/site-packages/idna/__init__.py b/myenv/lib/python3.12/site-packages/idna/__init__.py new file mode 100644 index 0000000..a40eeaf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..540fd88 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc new file 
mode 100644 index 0000000..32d4a24 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..5f11a71 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..b62cd08 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc new file mode 100644 index 0000000..0b73226 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc new file mode 100644 index 0000000..e43758e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc new file mode 100644 index 0000000..0d1bfc1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc b/myenv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc new file mode 100644 index 0000000..65a0c9f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/idna/codec.py b/myenv/lib/python3.12/site-packages/idna/codec.py new file mode 100644 index 0000000..c855a4d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Any, Tuple, Optional + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b'', 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b'' + if labels: + if not labels[-1]: + trailing_dot = b'.' 
+ del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b'.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b'.'.join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + if not isinstance(data, str): + data = str(data, 'ascii') + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != 'idna2008': + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + +codecs.register(search_function) diff --git a/myenv/lib/python3.12/site-packages/idna/compat.py b/myenv/lib/python3.12/site-packages/idna/compat.py new file mode 100644 index 0000000..786e6bd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/myenv/lib/python3.12/site-packages/idna/core.py b/myenv/lib/python3.12/site-packages/idna/core.py new file mode 100644 index 0000000..69b66ef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/core.py @@ -0,0 +1,399 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') 
+ return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + elif joining_type in [ord('L'), ord('D')]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + elif joining_type in [ord('R'), ord('D')]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = 
label.encode('ascii') + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') + + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + return unicodedata.normalize('NFC', output) + + +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if not isinstance(s, str): + try: + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if not isinstance(s, str): + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = 
s.split('.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/myenv/lib/python3.12/site-packages/idna/idnadata.py b/myenv/lib/python3.12/site-packages/idna/idnadata.py new file mode 100644 index 0000000..c61dcf9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/idnadata.py @@ -0,0 +1,4245 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '15.1.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b73a, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2ebf00002ee5e, + 0x2f8000002fa1e, + 0x300000003134b, + 0x31350000323b0, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1550001b156, + 0x1b1640001b168, + ), +} +joining_types = { + 0xad: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30a: 84, + 0x30b: 84, + 0x30c: 84, + 0x30d: 84, + 0x30e: 84, + 0x30f: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31a: 84, + 0x31b: 84, + 0x31c: 84, + 0x31d: 84, + 0x31e: 84, + 0x31f: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32a: 84, + 0x32b: 84, + 0x32c: 84, + 0x32d: 84, + 0x32e: 84, + 0x32f: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33a: 84, + 0x33b: 84, + 0x33c: 84, + 0x33d: 84, + 0x33e: 84, + 0x33f: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34a: 84, + 0x34b: 84, + 0x34c: 84, + 0x34d: 84, + 0x34e: 84, + 0x34f: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 84, + 0x35a: 
84, + 0x35b: 84, + 0x35c: 84, + 0x35d: 84, + 0x35e: 84, + 0x35f: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36a: 84, + 0x36b: 84, + 0x36c: 84, + 0x36d: 84, + 0x36e: 84, + 0x36f: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59a: 84, + 0x59b: 84, + 0x59c: 84, + 0x59d: 84, + 0x59e: 84, + 0x59f: 84, + 0x5a0: 84, + 0x5a1: 84, + 0x5a2: 84, + 0x5a3: 84, + 0x5a4: 84, + 0x5a5: 84, + 0x5a6: 84, + 0x5a7: 84, + 0x5a8: 84, + 0x5a9: 84, + 0x5aa: 84, + 0x5ab: 84, + 0x5ac: 84, + 0x5ad: 84, + 0x5ae: 84, + 0x5af: 84, + 0x5b0: 84, + 0x5b1: 84, + 0x5b2: 84, + 0x5b3: 84, + 0x5b4: 84, + 0x5b5: 84, + 0x5b6: 84, + 0x5b7: 84, + 0x5b8: 84, + 0x5b9: 84, + 0x5ba: 84, + 0x5bb: 84, + 0x5bc: 84, + 0x5bd: 84, + 0x5bf: 84, + 0x5c1: 84, + 0x5c2: 84, + 0x5c4: 84, + 0x5c5: 84, + 0x5c7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61a: 84, + 0x61c: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x64b: 84, + 0x64c: 84, + 0x64d: 84, + 0x64e: 84, + 0x64f: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65a: 84, + 0x65b: 84, + 0x65c: 84, + 0x65d: 84, + 0x65e: 84, + 0x65f: 84, + 0x66e: 68, + 0x66f: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6d6: 84, + 0x6d7: 84, + 0x6d8: 84, + 0x6d9: 84, + 0x6da: 84, + 0x6db: 84, + 0x6dc: 84, + 0x6df: 84, + 0x6e0: 84, + 0x6e1: 84, + 0x6e2: 84, + 0x6e3: 84, + 0x6e4: 84, + 0x6e7: 84, + 0x6e8: 84, + 0x6ea: 84, + 0x6eb: 84, + 
0x6ec: 84, + 0x6ed: 84, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73a: 84, + 0x73b: 84, + 0x73c: 84, + 0x73d: 84, + 0x73e: 84, + 0x73f: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74a: 84, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7a6: 84, + 0x7a7: 84, + 0x7a8: 84, + 0x7a9: 84, + 0x7aa: 84, + 0x7ab: 84, + 0x7ac: 84, + 0x7ad: 84, + 0x7ae: 84, + 0x7af: 84, + 0x7b0: 84, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7eb: 84, + 0x7ec: 84, + 0x7ed: 84, + 0x7ee: 84, + 0x7ef: 84, + 0x7f0: 84, + 0x7f1: 84, + 0x7f2: 84, + 0x7f3: 84, + 0x7fa: 67, + 0x7fd: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81b: 84, + 0x81c: 84, + 0x81d: 84, + 0x81e: 84, + 0x81f: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82a: 84, + 0x82b: 84, + 0x82c: 84, + 0x82d: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85a: 84, + 0x85b: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x898: 84, + 0x899: 84, + 0x89a: 84, + 0x89b: 84, + 0x89c: 84, + 0x89d: 84, + 0x89e: 84, + 0x89f: 84, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 
0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8ca: 84, + 0x8cb: 84, + 0x8cc: 84, + 0x8cd: 84, + 0x8ce: 84, + 0x8cf: 84, + 0x8d0: 84, + 0x8d1: 84, + 0x8d2: 84, + 0x8d3: 84, + 0x8d4: 84, + 0x8d5: 84, + 0x8d6: 84, + 0x8d7: 84, + 0x8d8: 84, + 0x8d9: 84, + 0x8da: 84, + 0x8db: 84, + 0x8dc: 84, + 0x8dd: 84, + 0x8de: 84, + 0x8df: 84, + 0x8e0: 84, + 0x8e1: 84, + 0x8e3: 84, + 0x8e4: 84, + 0x8e5: 84, + 0x8e6: 84, + 0x8e7: 84, + 0x8e8: 84, + 0x8e9: 84, + 0x8ea: 84, + 0x8eb: 84, + 0x8ec: 84, + 0x8ed: 84, + 0x8ee: 84, + 0x8ef: 84, + 0x8f0: 84, + 0x8f1: 84, + 0x8f2: 84, + 0x8f3: 84, + 0x8f4: 84, + 0x8f5: 84, + 0x8f6: 84, + 0x8f7: 84, + 0x8f8: 84, + 0x8f9: 84, + 0x8fa: 84, + 0x8fb: 84, + 0x8fc: 84, + 0x8fd: 84, + 0x8fe: 84, + 0x8ff: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93a: 84, + 0x93c: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94d: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9bc: 84, + 0x9c1: 84, + 0x9c2: 84, + 0x9c3: 84, + 0x9c4: 84, + 0x9cd: 84, + 0x9e2: 84, + 0x9e3: 84, + 0x9fe: 84, + 0xa01: 84, + 0xa02: 84, + 0xa3c: 84, + 0xa41: 84, + 0xa42: 84, + 0xa47: 84, + 0xa48: 84, + 0xa4b: 84, + 0xa4c: 84, + 0xa4d: 84, + 0xa51: 84, + 0xa70: 84, + 0xa71: 84, + 0xa75: 84, + 0xa81: 84, + 0xa82: 84, + 0xabc: 84, + 0xac1: 84, + 0xac2: 84, + 0xac3: 84, + 0xac4: 84, + 0xac5: 84, + 0xac7: 84, + 0xac8: 84, + 0xacd: 84, + 0xae2: 84, + 0xae3: 84, + 0xafa: 84, + 0xafb: 84, + 0xafc: 84, + 0xafd: 84, + 0xafe: 84, + 0xaff: 84, + 0xb01: 84, + 0xb3c: 84, + 0xb3f: 84, + 0xb41: 84, + 0xb42: 84, + 0xb43: 84, + 0xb44: 84, + 0xb4d: 84, + 0xb55: 84, + 0xb56: 84, + 0xb62: 84, + 0xb63: 84, + 0xb82: 84, + 0xbc0: 84, + 0xbcd: 84, + 0xc00: 84, + 0xc04: 84, + 0xc3c: 84, + 0xc3e: 84, + 0xc3f: 84, + 0xc40: 84, + 0xc46: 84, + 0xc47: 84, + 0xc48: 84, + 0xc4a: 84, + 0xc4b: 84, + 0xc4c: 84, + 0xc4d: 84, + 0xc55: 84, + 0xc56: 84, + 0xc62: 84, + 0xc63: 84, + 0xc81: 84, + 0xcbc: 84, + 0xcbf: 84, + 0xcc6: 84, + 0xccc: 84, + 0xccd: 84, + 0xce2: 84, + 0xce3: 84, + 0xd00: 84, + 0xd01: 84, + 0xd3b: 84, + 0xd3c: 84, + 0xd41: 84, + 0xd42: 84, + 0xd43: 84, + 0xd44: 84, + 0xd4d: 84, + 0xd62: 84, + 0xd63: 84, + 0xd81: 84, + 0xdca: 84, + 0xdd2: 84, + 0xdd3: 84, + 0xdd4: 84, + 0xdd6: 84, + 0xe31: 84, + 0xe34: 84, + 0xe35: 84, + 0xe36: 84, + 0xe37: 84, + 0xe38: 84, + 0xe39: 84, + 0xe3a: 84, + 0xe47: 84, + 0xe48: 84, + 0xe49: 84, + 0xe4a: 84, + 0xe4b: 84, + 0xe4c: 84, + 0xe4d: 84, + 0xe4e: 84, + 0xeb1: 84, + 0xeb4: 84, + 0xeb5: 84, + 0xeb6: 84, + 0xeb7: 84, + 0xeb8: 84, + 0xeb9: 84, + 0xeba: 84, + 0xebb: 84, + 0xebc: 84, + 0xec8: 84, + 0xec9: 84, + 0xeca: 84, + 0xecb: 84, + 0xecc: 84, + 0xecd: 84, + 0xece: 84, + 0xf18: 84, + 0xf19: 84, + 0xf35: 84, + 0xf37: 84, + 0xf39: 84, + 0xf71: 84, + 0xf72: 84, + 0xf73: 84, + 0xf74: 84, + 0xf75: 84, + 0xf76: 84, + 0xf77: 84, + 0xf78: 84, + 0xf79: 84, + 0xf7a: 84, + 0xf7b: 84, + 0xf7c: 84, + 0xf7d: 84, + 0xf7e: 84, + 0xf80: 84, + 0xf81: 84, + 0xf82: 84, + 0xf83: 84, + 0xf84: 84, + 0xf86: 84, + 0xf87: 84, + 0xf8d: 84, + 0xf8e: 84, + 
0xf8f: 84, + 0xf90: 84, + 0xf91: 84, + 0xf92: 84, + 0xf93: 84, + 0xf94: 84, + 0xf95: 84, + 0xf96: 84, + 0xf97: 84, + 0xf99: 84, + 0xf9a: 84, + 0xf9b: 84, + 0xf9c: 84, + 0xf9d: 84, + 0xf9e: 84, + 0xf9f: 84, + 0xfa0: 84, + 0xfa1: 84, + 0xfa2: 84, + 0xfa3: 84, + 0xfa4: 84, + 0xfa5: 84, + 0xfa6: 84, + 0xfa7: 84, + 0xfa8: 84, + 0xfa9: 84, + 0xfaa: 84, + 0xfab: 84, + 0xfac: 84, + 0xfad: 84, + 0xfae: 84, + 0xfaf: 84, + 0xfb0: 84, + 0xfb1: 84, + 0xfb2: 84, + 0xfb3: 84, + 0xfb4: 84, + 0xfb5: 84, + 0xfb6: 84, + 0xfb7: 84, + 0xfb8: 84, + 0xfb9: 84, + 0xfba: 84, + 0xfbb: 84, + 0xfbc: 84, + 0xfc6: 84, + 0x102d: 84, + 0x102e: 84, + 0x102f: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103a: 84, + 0x103d: 84, + 0x103e: 84, + 0x1058: 84, + 0x1059: 84, + 0x105e: 84, + 0x105f: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108d: 84, + 0x109d: 84, + 0x135d: 84, + 0x135e: 84, + 0x135f: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17b4: 84, + 0x17b5: 84, + 0x17b7: 84, + 0x17b8: 84, + 0x17b9: 84, + 0x17ba: 84, + 0x17bb: 84, + 0x17bc: 84, + 0x17bd: 84, + 0x17c6: 84, + 0x17c9: 84, + 0x17ca: 84, + 0x17cb: 84, + 0x17cc: 84, + 0x17cd: 84, + 0x17ce: 84, + 0x17cf: 84, + 0x17d0: 84, + 0x17d1: 84, + 0x17d2: 84, + 0x17d3: 84, + 0x17dd: 84, + 0x1807: 68, + 0x180a: 67, + 0x180b: 84, + 0x180c: 84, + 0x180d: 84, + 0x180f: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18a9: 84, + 0x18aa: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193a: 84, + 0x193b: 84, + 0x1a17: 84, + 0x1a18: 84, + 0x1a1b: 84, + 0x1a56: 84, + 0x1a58: 84, + 0x1a59: 84, + 0x1a5a: 84, + 
0x1a5b: 84, + 0x1a5c: 84, + 0x1a5d: 84, + 0x1a5e: 84, + 0x1a60: 84, + 0x1a62: 84, + 0x1a65: 84, + 0x1a66: 84, + 0x1a67: 84, + 0x1a68: 84, + 0x1a69: 84, + 0x1a6a: 84, + 0x1a6b: 84, + 0x1a6c: 84, + 0x1a73: 84, + 0x1a74: 84, + 0x1a75: 84, + 0x1a76: 84, + 0x1a77: 84, + 0x1a78: 84, + 0x1a79: 84, + 0x1a7a: 84, + 0x1a7b: 84, + 0x1a7c: 84, + 0x1a7f: 84, + 0x1ab0: 84, + 0x1ab1: 84, + 0x1ab2: 84, + 0x1ab3: 84, + 0x1ab4: 84, + 0x1ab5: 84, + 0x1ab6: 84, + 0x1ab7: 84, + 0x1ab8: 84, + 0x1ab9: 84, + 0x1aba: 84, + 0x1abb: 84, + 0x1abc: 84, + 0x1abd: 84, + 0x1abe: 84, + 0x1abf: 84, + 0x1ac0: 84, + 0x1ac1: 84, + 0x1ac2: 84, + 0x1ac3: 84, + 0x1ac4: 84, + 0x1ac5: 84, + 0x1ac6: 84, + 0x1ac7: 84, + 0x1ac8: 84, + 0x1ac9: 84, + 0x1aca: 84, + 0x1acb: 84, + 0x1acc: 84, + 0x1acd: 84, + 0x1ace: 84, + 0x1b00: 84, + 0x1b01: 84, + 0x1b02: 84, + 0x1b03: 84, + 0x1b34: 84, + 0x1b36: 84, + 0x1b37: 84, + 0x1b38: 84, + 0x1b39: 84, + 0x1b3a: 84, + 0x1b3c: 84, + 0x1b42: 84, + 0x1b6b: 84, + 0x1b6c: 84, + 0x1b6d: 84, + 0x1b6e: 84, + 0x1b6f: 84, + 0x1b70: 84, + 0x1b71: 84, + 0x1b72: 84, + 0x1b73: 84, + 0x1b80: 84, + 0x1b81: 84, + 0x1ba2: 84, + 0x1ba3: 84, + 0x1ba4: 84, + 0x1ba5: 84, + 0x1ba8: 84, + 0x1ba9: 84, + 0x1bab: 84, + 0x1bac: 84, + 0x1bad: 84, + 0x1be6: 84, + 0x1be8: 84, + 0x1be9: 84, + 0x1bed: 84, + 0x1bef: 84, + 0x1bf0: 84, + 0x1bf1: 84, + 0x1c2c: 84, + 0x1c2d: 84, + 0x1c2e: 84, + 0x1c2f: 84, + 0x1c30: 84, + 0x1c31: 84, + 0x1c32: 84, + 0x1c33: 84, + 0x1c36: 84, + 0x1c37: 84, + 0x1cd0: 84, + 0x1cd1: 84, + 0x1cd2: 84, + 0x1cd4: 84, + 0x1cd5: 84, + 0x1cd6: 84, + 0x1cd7: 84, + 0x1cd8: 84, + 0x1cd9: 84, + 0x1cda: 84, + 0x1cdb: 84, + 0x1cdc: 84, + 0x1cdd: 84, + 0x1cde: 84, + 0x1cdf: 84, + 0x1ce0: 84, + 0x1ce2: 84, + 0x1ce3: 84, + 0x1ce4: 84, + 0x1ce5: 84, + 0x1ce6: 84, + 0x1ce7: 84, + 0x1ce8: 84, + 0x1ced: 84, + 0x1cf4: 84, + 0x1cf8: 84, + 0x1cf9: 84, + 0x1dc0: 84, + 0x1dc1: 84, + 0x1dc2: 84, + 0x1dc3: 84, + 0x1dc4: 84, + 0x1dc5: 84, + 0x1dc6: 84, + 0x1dc7: 84, + 0x1dc8: 84, + 0x1dc9: 84, + 0x1dca: 84, + 0x1dcb: 84, + 0x1dcc: 84, + 0x1dcd: 84, + 0x1dce: 84, + 0x1dcf: 84, + 0x1dd0: 84, + 0x1dd1: 84, + 0x1dd2: 84, + 0x1dd3: 84, + 0x1dd4: 84, + 0x1dd5: 84, + 0x1dd6: 84, + 0x1dd7: 84, + 0x1dd8: 84, + 0x1dd9: 84, + 0x1dda: 84, + 0x1ddb: 84, + 0x1ddc: 84, + 0x1ddd: 84, + 0x1dde: 84, + 0x1ddf: 84, + 0x1de0: 84, + 0x1de1: 84, + 0x1de2: 84, + 0x1de3: 84, + 0x1de4: 84, + 0x1de5: 84, + 0x1de6: 84, + 0x1de7: 84, + 0x1de8: 84, + 0x1de9: 84, + 0x1dea: 84, + 0x1deb: 84, + 0x1dec: 84, + 0x1ded: 84, + 0x1dee: 84, + 0x1def: 84, + 0x1df0: 84, + 0x1df1: 84, + 0x1df2: 84, + 0x1df3: 84, + 0x1df4: 84, + 0x1df5: 84, + 0x1df6: 84, + 0x1df7: 84, + 0x1df8: 84, + 0x1df9: 84, + 0x1dfa: 84, + 0x1dfb: 84, + 0x1dfc: 84, + 0x1dfd: 84, + 0x1dfe: 84, + 0x1dff: 84, + 0x200b: 84, + 0x200d: 67, + 0x200e: 84, + 0x200f: 84, + 0x202a: 84, + 0x202b: 84, + 0x202c: 84, + 0x202d: 84, + 0x202e: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206a: 84, + 0x206b: 84, + 0x206c: 84, + 0x206d: 84, + 0x206e: 84, + 0x206f: 84, + 0x20d0: 84, + 0x20d1: 84, + 0x20d2: 84, + 0x20d3: 84, + 0x20d4: 84, + 0x20d5: 84, + 0x20d6: 84, + 0x20d7: 84, + 0x20d8: 84, + 0x20d9: 84, + 0x20da: 84, + 0x20db: 84, + 0x20dc: 84, + 0x20dd: 84, + 0x20de: 84, + 0x20df: 84, + 0x20e0: 84, + 0x20e1: 84, + 0x20e2: 84, + 0x20e3: 84, + 0x20e4: 84, + 0x20e5: 84, + 0x20e6: 84, + 0x20e7: 84, + 0x20e8: 84, + 0x20e9: 84, + 0x20ea: 84, + 0x20eb: 84, + 0x20ec: 84, + 0x20ed: 84, + 0x20ee: 84, + 0x20ef: 84, + 0x20f0: 84, + 0x2cef: 84, + 0x2cf0: 84, + 0x2cf1: 84, + 0x2d7f: 84, + 0x2de0: 84, 
+ 0x2de1: 84, + 0x2de2: 84, + 0x2de3: 84, + 0x2de4: 84, + 0x2de5: 84, + 0x2de6: 84, + 0x2de7: 84, + 0x2de8: 84, + 0x2de9: 84, + 0x2dea: 84, + 0x2deb: 84, + 0x2dec: 84, + 0x2ded: 84, + 0x2dee: 84, + 0x2def: 84, + 0x2df0: 84, + 0x2df1: 84, + 0x2df2: 84, + 0x2df3: 84, + 0x2df4: 84, + 0x2df5: 84, + 0x2df6: 84, + 0x2df7: 84, + 0x2df8: 84, + 0x2df9: 84, + 0x2dfa: 84, + 0x2dfb: 84, + 0x2dfc: 84, + 0x2dfd: 84, + 0x2dfe: 84, + 0x2dff: 84, + 0x302a: 84, + 0x302b: 84, + 0x302c: 84, + 0x302d: 84, + 0x3099: 84, + 0x309a: 84, + 0xa66f: 84, + 0xa670: 84, + 0xa671: 84, + 0xa672: 84, + 0xa674: 84, + 0xa675: 84, + 0xa676: 84, + 0xa677: 84, + 0xa678: 84, + 0xa679: 84, + 0xa67a: 84, + 0xa67b: 84, + 0xa67c: 84, + 0xa67d: 84, + 0xa69e: 84, + 0xa69f: 84, + 0xa6f0: 84, + 0xa6f1: 84, + 0xa802: 84, + 0xa806: 84, + 0xa80b: 84, + 0xa825: 84, + 0xa826: 84, + 0xa82c: 84, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa8c4: 84, + 0xa8c5: 84, + 0xa8e0: 84, + 0xa8e1: 84, + 0xa8e2: 84, + 0xa8e3: 84, + 0xa8e4: 84, + 0xa8e5: 84, + 0xa8e6: 84, + 0xa8e7: 84, + 0xa8e8: 84, + 0xa8e9: 84, + 0xa8ea: 84, + 0xa8eb: 84, + 0xa8ec: 84, + 0xa8ed: 84, + 0xa8ee: 84, + 0xa8ef: 84, + 0xa8f0: 84, + 0xa8f1: 84, + 0xa8ff: 84, + 0xa926: 84, + 0xa927: 84, + 0xa928: 84, + 0xa929: 84, + 0xa92a: 84, + 0xa92b: 84, + 0xa92c: 84, + 0xa92d: 84, + 0xa947: 84, + 0xa948: 84, + 0xa949: 84, + 0xa94a: 84, + 0xa94b: 84, + 0xa94c: 84, + 0xa94d: 84, + 0xa94e: 84, + 0xa94f: 84, + 0xa950: 84, + 0xa951: 84, + 0xa980: 84, + 0xa981: 84, + 0xa982: 84, + 0xa9b3: 84, + 0xa9b6: 84, + 0xa9b7: 84, + 0xa9b8: 84, + 0xa9b9: 84, + 0xa9bc: 84, + 0xa9bd: 84, + 0xa9e5: 84, + 0xaa29: 84, + 0xaa2a: 84, + 0xaa2b: 84, + 0xaa2c: 84, + 0xaa2d: 84, + 0xaa2e: 84, + 0xaa31: 84, + 0xaa32: 84, + 0xaa35: 84, + 0xaa36: 84, + 0xaa43: 84, + 0xaa4c: 84, + 0xaa7c: 84, + 0xaab0: 84, + 0xaab2: 84, + 0xaab3: 84, + 0xaab4: 84, + 0xaab7: 84, + 0xaab8: 84, + 0xaabe: 84, + 0xaabf: 84, + 0xaac1: 84, + 0xaaec: 84, + 0xaaed: 84, + 0xaaf6: 84, + 0xabe5: 84, + 0xabe8: 84, + 0xabed: 84, + 0xfb1e: 84, + 0xfe00: 84, + 0xfe01: 84, + 0xfe02: 84, + 0xfe03: 84, + 0xfe04: 84, + 0xfe05: 84, + 0xfe06: 84, + 0xfe07: 84, + 0xfe08: 84, + 0xfe09: 84, + 0xfe0a: 84, + 0xfe0b: 84, + 0xfe0c: 84, + 0xfe0d: 84, + 0xfe0e: 84, + 0xfe0f: 84, + 0xfe20: 84, + 0xfe21: 84, + 0xfe22: 84, + 0xfe23: 84, + 0xfe24: 84, + 0xfe25: 84, + 0xfe26: 84, + 0xfe27: 84, + 0xfe28: 84, + 0xfe29: 84, + 0xfe2a: 84, + 0xfe2b: 84, + 0xfe2c: 84, + 0xfe2d: 84, + 0xfe2e: 84, + 0xfe2f: 84, + 0xfeff: 84, + 0xfff9: 84, + 0xfffa: 84, + 0xfffb: 84, + 0x101fd: 84, + 0x102e0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037a: 84, + 0x10a01: 84, + 0x10a02: 84, + 0x10a03: 84, + 0x10a05: 84, + 0x10a06: 84, + 0x10a0c: 84, + 0x10a0d: 84, + 0x10a0e: 84, + 0x10a0f: 84, + 0x10a38: 84, + 0x10a39: 84, + 0x10a3a: 84, + 0x10a3f: 84, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 
0x10ac4: 68, + 0x10ac5: 82, + 0x10ac7: 82, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae4: 82, + 0x10ae5: 84, + 0x10ae6: 84, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10d24: 84, + 0x10d25: 84, + 0x10d26: 84, + 0x10d27: 84, + 0x10eab: 84, + 0x10eac: 84, + 0x10efd: 84, + 0x10efe: 84, + 0x10eff: 84, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f46: 84, + 0x10f47: 84, + 0x10f48: 84, + 0x10f49: 84, + 0x10f4a: 84, + 0x10f4b: 84, + 0x10f4c: 84, + 0x10f4d: 84, + 0x10f4e: 84, + 0x10f4f: 84, + 0x10f50: 84, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10f82: 84, + 0x10f83: 84, + 0x10f84: 84, + 0x10f85: 84, + 0x10fb0: 68, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103a: 84, + 0x1103b: 84, + 0x1103c: 84, + 0x1103d: 84, + 0x1103e: 84, + 0x1103f: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107f: 84, + 0x11080: 84, + 0x11081: 84, + 0x110b3: 84, + 0x110b4: 84, + 0x110b5: 84, + 0x110b6: 84, + 0x110b9: 84, + 0x110ba: 84, + 0x110c2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112a: 84, + 0x1112b: 84, + 0x1112d: 84, + 0x1112e: 84, + 0x1112f: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111b6: 84, + 0x111b7: 84, + 0x111b8: 84, + 0x111b9: 84, + 0x111ba: 84, + 0x111bb: 84, + 0x111bc: 84, + 
0x111bd: 84, + 0x111be: 84, + 0x111c9: 84, + 0x111ca: 84, + 0x111cb: 84, + 0x111cc: 84, + 0x111cf: 84, + 0x1122f: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123e: 84, + 0x11241: 84, + 0x112df: 84, + 0x112e3: 84, + 0x112e4: 84, + 0x112e5: 84, + 0x112e6: 84, + 0x112e7: 84, + 0x112e8: 84, + 0x112e9: 84, + 0x112ea: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133b: 84, + 0x1133c: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136a: 84, + 0x1136b: 84, + 0x1136c: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143a: 84, + 0x1143b: 84, + 0x1143c: 84, + 0x1143d: 84, + 0x1143e: 84, + 0x1143f: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145e: 84, + 0x114b3: 84, + 0x114b4: 84, + 0x114b5: 84, + 0x114b6: 84, + 0x114b7: 84, + 0x114b8: 84, + 0x114ba: 84, + 0x114bf: 84, + 0x114c0: 84, + 0x114c2: 84, + 0x114c3: 84, + 0x115b2: 84, + 0x115b3: 84, + 0x115b4: 84, + 0x115b5: 84, + 0x115bc: 84, + 0x115bd: 84, + 0x115bf: 84, + 0x115c0: 84, + 0x115dc: 84, + 0x115dd: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163a: 84, + 0x1163d: 84, + 0x1163f: 84, + 0x11640: 84, + 0x116ab: 84, + 0x116ad: 84, + 0x116b0: 84, + 0x116b1: 84, + 0x116b2: 84, + 0x116b3: 84, + 0x116b4: 84, + 0x116b5: 84, + 0x116b7: 84, + 0x1171d: 84, + 0x1171e: 84, + 0x1171f: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172a: 84, + 0x1172b: 84, + 0x1182f: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183a: 84, + 0x1193b: 84, + 0x1193c: 84, + 0x1193e: 84, + 0x11943: 84, + 0x119d4: 84, + 0x119d5: 84, + 0x119d6: 84, + 0x119d7: 84, + 0x119da: 84, + 0x119db: 84, + 0x119e0: 84, + 0x11a01: 84, + 0x11a02: 84, + 0x11a03: 84, + 0x11a04: 84, + 0x11a05: 84, + 0x11a06: 84, + 0x11a07: 84, + 0x11a08: 84, + 0x11a09: 84, + 0x11a0a: 84, + 0x11a33: 84, + 0x11a34: 84, + 0x11a35: 84, + 0x11a36: 84, + 0x11a37: 84, + 0x11a38: 84, + 0x11a3b: 84, + 0x11a3c: 84, + 0x11a3d: 84, + 0x11a3e: 84, + 0x11a47: 84, + 0x11a51: 84, + 0x11a52: 84, + 0x11a53: 84, + 0x11a54: 84, + 0x11a55: 84, + 0x11a56: 84, + 0x11a59: 84, + 0x11a5a: 84, + 0x11a5b: 84, + 0x11a8a: 84, + 0x11a8b: 84, + 0x11a8c: 84, + 0x11a8d: 84, + 0x11a8e: 84, + 0x11a8f: 84, + 0x11a90: 84, + 0x11a91: 84, + 0x11a92: 84, + 0x11a93: 84, + 0x11a94: 84, + 0x11a95: 84, + 0x11a96: 84, + 0x11a98: 84, + 0x11a99: 84, + 0x11c30: 84, + 0x11c31: 84, + 0x11c32: 84, + 0x11c33: 84, + 0x11c34: 84, + 0x11c35: 84, + 0x11c36: 84, + 0x11c38: 84, + 0x11c39: 84, + 0x11c3a: 84, + 0x11c3b: 84, + 0x11c3c: 84, + 0x11c3d: 84, + 0x11c3f: 84, + 0x11c92: 84, + 0x11c93: 84, + 0x11c94: 84, + 0x11c95: 84, + 0x11c96: 84, + 0x11c97: 84, + 0x11c98: 84, + 0x11c99: 84, + 0x11c9a: 84, + 0x11c9b: 84, + 0x11c9c: 84, + 0x11c9d: 84, + 0x11c9e: 84, + 0x11c9f: 84, + 0x11ca0: 84, + 0x11ca1: 84, + 0x11ca2: 84, + 0x11ca3: 84, + 0x11ca4: 84, + 0x11ca5: 84, + 0x11ca6: 84, + 0x11ca7: 84, + 0x11caa: 84, + 0x11cab: 84, + 0x11cac: 84, + 0x11cad: 84, + 0x11cae: 84, + 0x11caf: 84, + 0x11cb0: 84, + 0x11cb2: 84, + 0x11cb3: 84, + 0x11cb5: 84, + 0x11cb6: 84, + 0x11d31: 84, + 0x11d32: 84, + 0x11d33: 84, + 0x11d34: 84, + 0x11d35: 84, + 0x11d36: 84, + 0x11d3a: 84, + 0x11d3c: 84, + 0x11d3d: 84, + 0x11d3f: 84, + 0x11d40: 84, + 0x11d41: 84, + 0x11d42: 84, + 0x11d43: 84, + 0x11d44: 84, + 0x11d45: 84, + 
0x11d47: 84, + 0x11d90: 84, + 0x11d91: 84, + 0x11d95: 84, + 0x11d97: 84, + 0x11ef3: 84, + 0x11ef4: 84, + 0x11f00: 84, + 0x11f01: 84, + 0x11f36: 84, + 0x11f37: 84, + 0x11f38: 84, + 0x11f39: 84, + 0x11f3a: 84, + 0x11f40: 84, + 0x11f42: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343a: 84, + 0x1343b: 84, + 0x1343c: 84, + 0x1343d: 84, + 0x1343e: 84, + 0x1343f: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344a: 84, + 0x1344b: 84, + 0x1344c: 84, + 0x1344d: 84, + 0x1344e: 84, + 0x1344f: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x16af0: 84, + 0x16af1: 84, + 0x16af2: 84, + 0x16af3: 84, + 0x16af4: 84, + 0x16b30: 84, + 0x16b31: 84, + 0x16b32: 84, + 0x16b33: 84, + 0x16b34: 84, + 0x16b35: 84, + 0x16b36: 84, + 0x16f4f: 84, + 0x16f8f: 84, + 0x16f90: 84, + 0x16f91: 84, + 0x16f92: 84, + 0x16fe4: 84, + 0x1bc9d: 84, + 0x1bc9e: 84, + 0x1bca0: 84, + 0x1bca1: 84, + 0x1bca2: 84, + 0x1bca3: 84, + 0x1cf00: 84, + 0x1cf01: 84, + 0x1cf02: 84, + 0x1cf03: 84, + 0x1cf04: 84, + 0x1cf05: 84, + 0x1cf06: 84, + 0x1cf07: 84, + 0x1cf08: 84, + 0x1cf09: 84, + 0x1cf0a: 84, + 0x1cf0b: 84, + 0x1cf0c: 84, + 0x1cf0d: 84, + 0x1cf0e: 84, + 0x1cf0f: 84, + 0x1cf10: 84, + 0x1cf11: 84, + 0x1cf12: 84, + 0x1cf13: 84, + 0x1cf14: 84, + 0x1cf15: 84, + 0x1cf16: 84, + 0x1cf17: 84, + 0x1cf18: 84, + 0x1cf19: 84, + 0x1cf1a: 84, + 0x1cf1b: 84, + 0x1cf1c: 84, + 0x1cf1d: 84, + 0x1cf1e: 84, + 0x1cf1f: 84, + 0x1cf20: 84, + 0x1cf21: 84, + 0x1cf22: 84, + 0x1cf23: 84, + 0x1cf24: 84, + 0x1cf25: 84, + 0x1cf26: 84, + 0x1cf27: 84, + 0x1cf28: 84, + 0x1cf29: 84, + 0x1cf2a: 84, + 0x1cf2b: 84, + 0x1cf2c: 84, + 0x1cf2d: 84, + 0x1cf30: 84, + 0x1cf31: 84, + 0x1cf32: 84, + 0x1cf33: 84, + 0x1cf34: 84, + 0x1cf35: 84, + 0x1cf36: 84, + 0x1cf37: 84, + 0x1cf38: 84, + 0x1cf39: 84, + 0x1cf3a: 84, + 0x1cf3b: 84, + 0x1cf3c: 84, + 0x1cf3d: 84, + 0x1cf3e: 84, + 0x1cf3f: 84, + 0x1cf40: 84, + 0x1cf41: 84, + 0x1cf42: 84, + 0x1cf43: 84, + 0x1cf44: 84, + 0x1cf45: 84, + 0x1cf46: 84, + 0x1d167: 84, + 0x1d168: 84, + 0x1d169: 84, + 0x1d173: 84, + 0x1d174: 84, + 0x1d175: 84, + 0x1d176: 84, + 0x1d177: 84, + 0x1d178: 84, + 0x1d179: 84, + 0x1d17a: 84, + 0x1d17b: 84, + 0x1d17c: 84, + 0x1d17d: 84, + 0x1d17e: 84, + 0x1d17f: 84, + 0x1d180: 84, + 0x1d181: 84, + 0x1d182: 84, + 0x1d185: 84, + 0x1d186: 84, + 0x1d187: 84, + 0x1d188: 84, + 0x1d189: 84, + 0x1d18a: 84, + 0x1d18b: 84, + 0x1d1aa: 84, + 0x1d1ab: 84, + 0x1d1ac: 84, + 0x1d1ad: 84, + 0x1d242: 84, + 0x1d243: 84, + 0x1d244: 84, + 0x1da00: 84, + 0x1da01: 84, + 0x1da02: 84, + 0x1da03: 84, + 0x1da04: 84, + 0x1da05: 84, + 0x1da06: 84, + 0x1da07: 84, + 0x1da08: 84, + 0x1da09: 84, + 0x1da0a: 84, + 0x1da0b: 84, + 0x1da0c: 84, + 0x1da0d: 84, + 0x1da0e: 84, + 0x1da0f: 84, + 0x1da10: 84, + 0x1da11: 84, + 0x1da12: 84, + 0x1da13: 84, + 0x1da14: 84, + 0x1da15: 84, + 0x1da16: 84, + 0x1da17: 84, + 0x1da18: 84, + 0x1da19: 84, + 0x1da1a: 84, + 0x1da1b: 84, + 0x1da1c: 84, + 0x1da1d: 84, + 0x1da1e: 84, + 0x1da1f: 84, + 0x1da20: 84, + 0x1da21: 84, + 0x1da22: 84, + 0x1da23: 84, + 0x1da24: 84, + 0x1da25: 84, + 0x1da26: 84, + 0x1da27: 84, + 0x1da28: 84, + 0x1da29: 84, + 0x1da2a: 84, + 0x1da2b: 84, + 0x1da2c: 84, + 0x1da2d: 84, + 0x1da2e: 84, + 0x1da2f: 84, + 0x1da30: 84, + 0x1da31: 84, + 0x1da32: 84, + 0x1da33: 84, + 0x1da34: 84, + 0x1da35: 84, + 0x1da36: 84, + 0x1da3b: 84, + 0x1da3c: 84, + 0x1da3d: 84, + 0x1da3e: 84, + 0x1da3f: 84, + 0x1da40: 84, + 0x1da41: 84, + 0x1da42: 84, + 
0x1da43: 84, + 0x1da44: 84, + 0x1da45: 84, + 0x1da46: 84, + 0x1da47: 84, + 0x1da48: 84, + 0x1da49: 84, + 0x1da4a: 84, + 0x1da4b: 84, + 0x1da4c: 84, + 0x1da4d: 84, + 0x1da4e: 84, + 0x1da4f: 84, + 0x1da50: 84, + 0x1da51: 84, + 0x1da52: 84, + 0x1da53: 84, + 0x1da54: 84, + 0x1da55: 84, + 0x1da56: 84, + 0x1da57: 84, + 0x1da58: 84, + 0x1da59: 84, + 0x1da5a: 84, + 0x1da5b: 84, + 0x1da5c: 84, + 0x1da5d: 84, + 0x1da5e: 84, + 0x1da5f: 84, + 0x1da60: 84, + 0x1da61: 84, + 0x1da62: 84, + 0x1da63: 84, + 0x1da64: 84, + 0x1da65: 84, + 0x1da66: 84, + 0x1da67: 84, + 0x1da68: 84, + 0x1da69: 84, + 0x1da6a: 84, + 0x1da6b: 84, + 0x1da6c: 84, + 0x1da75: 84, + 0x1da84: 84, + 0x1da9b: 84, + 0x1da9c: 84, + 0x1da9d: 84, + 0x1da9e: 84, + 0x1da9f: 84, + 0x1daa1: 84, + 0x1daa2: 84, + 0x1daa3: 84, + 0x1daa4: 84, + 0x1daa5: 84, + 0x1daa6: 84, + 0x1daa7: 84, + 0x1daa8: 84, + 0x1daa9: 84, + 0x1daaa: 84, + 0x1daab: 84, + 0x1daac: 84, + 0x1daad: 84, + 0x1daae: 84, + 0x1daaf: 84, + 0x1e000: 84, + 0x1e001: 84, + 0x1e002: 84, + 0x1e003: 84, + 0x1e004: 84, + 0x1e005: 84, + 0x1e006: 84, + 0x1e008: 84, + 0x1e009: 84, + 0x1e00a: 84, + 0x1e00b: 84, + 0x1e00c: 84, + 0x1e00d: 84, + 0x1e00e: 84, + 0x1e00f: 84, + 0x1e010: 84, + 0x1e011: 84, + 0x1e012: 84, + 0x1e013: 84, + 0x1e014: 84, + 0x1e015: 84, + 0x1e016: 84, + 0x1e017: 84, + 0x1e018: 84, + 0x1e01b: 84, + 0x1e01c: 84, + 0x1e01d: 84, + 0x1e01e: 84, + 0x1e01f: 84, + 0x1e020: 84, + 0x1e021: 84, + 0x1e023: 84, + 0x1e024: 84, + 0x1e026: 84, + 0x1e027: 84, + 0x1e028: 84, + 0x1e029: 84, + 0x1e02a: 84, + 0x1e08f: 84, + 0x1e130: 84, + 0x1e131: 84, + 0x1e132: 84, + 0x1e133: 84, + 0x1e134: 84, + 0x1e135: 84, + 0x1e136: 84, + 0x1e2ae: 84, + 0x1e2ec: 84, + 0x1e2ed: 84, + 0x1e2ee: 84, + 0x1e2ef: 84, + 0x1e4ec: 84, + 0x1e4ed: 84, + 0x1e4ee: 84, + 0x1e4ef: 84, + 0x1e8d0: 84, + 0x1e8d1: 84, + 0x1e8d2: 84, + 0x1e8d3: 84, + 0x1e8d4: 84, + 0x1e8d5: 84, + 0x1e8d6: 84, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e944: 84, + 0x1e945: 84, + 0x1e946: 84, + 0x1e947: 84, + 0x1e948: 84, + 0x1e949: 84, + 0x1e94a: 84, + 0x1e94b: 84, + 0xe0001: 84, + 0xe0020: 84, + 0xe0021: 84, + 0xe0022: 84, + 0xe0023: 84, + 0xe0024: 84, + 0xe0025: 84, + 0xe0026: 84, + 0xe0027: 84, + 0xe0028: 84, + 0xe0029: 84, + 0xe002a: 84, + 0xe002b: 84, + 0xe002c: 84, + 0xe002d: 84, + 0xe002e: 84, + 0xe002f: 84, + 0xe0030: 84, + 0xe0031: 84, + 0xe0032: 84, + 0xe0033: 84, + 0xe0034: 84, + 0xe0035: 84, + 0xe0036: 84, + 0xe0037: 84, + 0xe0038: 84, + 0xe0039: 84, + 0xe003a: 84, + 0xe003b: 84, + 0xe003c: 84, + 0xe003d: 84, + 0xe003e: 84, + 0xe003f: 84, + 0xe0040: 84, + 0xe0041: 84, + 
0xe0042: 84, + 0xe0043: 84, + 0xe0044: 84, + 0xe0045: 84, + 0xe0046: 84, + 0xe0047: 84, + 0xe0048: 84, + 0xe0049: 84, + 0xe004a: 84, + 0xe004b: 84, + 0xe004c: 84, + 0xe004d: 84, + 0xe004e: 84, + 0xe004f: 84, + 0xe0050: 84, + 0xe0051: 84, + 0xe0052: 84, + 0xe0053: 84, + 0xe0054: 84, + 0xe0055: 84, + 0xe0056: 84, + 0xe0057: 84, + 0xe0058: 84, + 0xe0059: 84, + 0xe005a: 84, + 0xe005b: 84, + 0xe005c: 84, + 0xe005d: 84, + 0xe005e: 84, + 0xe005f: 84, + 0xe0060: 84, + 0xe0061: 84, + 0xe0062: 84, + 0xe0063: 84, + 0xe0064: 84, + 0xe0065: 84, + 0xe0066: 84, + 0xe0067: 84, + 0xe0068: 84, + 0xe0069: 84, + 0xe006a: 84, + 0xe006b: 84, + 0xe006c: 84, + 0xe006d: 84, + 0xe006e: 84, + 0xe006f: 84, + 0xe0070: 84, + 0xe0071: 84, + 0xe0072: 84, + 0xe0073: 84, + 0xe0074: 84, + 0xe0075: 84, + 0xe0076: 84, + 0xe0077: 84, + 0xe0078: 84, + 0xe0079: 84, + 0xe007a: 84, + 0xe007b: 84, + 0xe007c: 84, + 0xe007d: 84, + 0xe007e: 84, + 0xe007f: 84, + 0xe0100: 84, + 0xe0101: 84, + 0xe0102: 84, + 0xe0103: 84, + 0xe0104: 84, + 0xe0105: 84, + 0xe0106: 84, + 0xe0107: 84, + 0xe0108: 84, + 0xe0109: 84, + 0xe010a: 84, + 0xe010b: 84, + 0xe010c: 84, + 0xe010d: 84, + 0xe010e: 84, + 0xe010f: 84, + 0xe0110: 84, + 0xe0111: 84, + 0xe0112: 84, + 0xe0113: 84, + 0xe0114: 84, + 0xe0115: 84, + 0xe0116: 84, + 0xe0117: 84, + 0xe0118: 84, + 0xe0119: 84, + 0xe011a: 84, + 0xe011b: 84, + 0xe011c: 84, + 0xe011d: 84, + 0xe011e: 84, + 0xe011f: 84, + 0xe0120: 84, + 0xe0121: 84, + 0xe0122: 84, + 0xe0123: 84, + 0xe0124: 84, + 0xe0125: 84, + 0xe0126: 84, + 0xe0127: 84, + 0xe0128: 84, + 0xe0129: 84, + 0xe012a: 84, + 0xe012b: 84, + 0xe012c: 84, + 0xe012d: 84, + 0xe012e: 84, + 0xe012f: 84, + 0xe0130: 84, + 0xe0131: 84, + 0xe0132: 84, + 0xe0133: 84, + 0xe0134: 84, + 0xe0135: 84, + 0xe0136: 84, + 0xe0137: 84, + 0xe0138: 84, + 0xe0139: 84, + 0xe013a: 84, + 0xe013b: 84, + 0xe013c: 84, + 0xe013d: 84, + 0xe013e: 84, + 0xe013f: 84, + 0xe0140: 84, + 0xe0141: 84, + 0xe0142: 84, + 0xe0143: 84, + 0xe0144: 84, + 0xe0145: 84, + 0xe0146: 84, + 0xe0147: 84, + 0xe0148: 84, + 0xe0149: 84, + 0xe014a: 84, + 0xe014b: 84, + 0xe014c: 84, + 0xe014d: 84, + 0xe014e: 84, + 0xe014f: 84, + 0xe0150: 84, + 0xe0151: 84, + 0xe0152: 84, + 0xe0153: 84, + 0xe0154: 84, + 0xe0155: 84, + 0xe0156: 84, + 0xe0157: 84, + 0xe0158: 84, + 0xe0159: 84, + 0xe015a: 84, + 0xe015b: 84, + 0xe015c: 84, + 0xe015d: 84, + 0xe015e: 84, + 0xe015f: 84, + 0xe0160: 84, + 0xe0161: 84, + 0xe0162: 84, + 0xe0163: 84, + 0xe0164: 84, + 0xe0165: 84, + 0xe0166: 84, + 0xe0167: 84, + 0xe0168: 84, + 0xe0169: 84, + 0xe016a: 84, + 0xe016b: 84, + 0xe016c: 84, + 0xe016d: 84, + 0xe016e: 84, + 0xe016f: 84, + 0xe0170: 84, + 0xe0171: 84, + 0xe0172: 84, + 0xe0173: 84, + 0xe0174: 84, + 0xe0175: 84, + 0xe0176: 84, + 0xe0177: 84, + 0xe0178: 84, + 0xe0179: 84, + 0xe017a: 84, + 0xe017b: 84, + 0xe017c: 84, + 0xe017d: 84, + 0xe017e: 84, + 0xe017f: 84, + 0xe0180: 84, + 0xe0181: 84, + 0xe0182: 84, + 0xe0183: 84, + 0xe0184: 84, + 0xe0185: 84, + 0xe0186: 84, + 0xe0187: 84, + 0xe0188: 84, + 0xe0189: 84, + 0xe018a: 84, + 0xe018b: 84, + 0xe018c: 84, + 0xe018d: 84, + 0xe018e: 84, + 0xe018f: 84, + 0xe0190: 84, + 0xe0191: 84, + 0xe0192: 84, + 0xe0193: 84, + 0xe0194: 84, + 0xe0195: 84, + 0xe0196: 84, + 0xe0197: 84, + 0xe0198: 84, + 0xe0199: 84, + 0xe019a: 84, + 0xe019b: 84, + 0xe019c: 84, + 0xe019d: 84, + 0xe019e: 84, + 0xe019f: 84, + 0xe01a0: 84, + 0xe01a1: 84, + 0xe01a2: 84, + 0xe01a3: 84, + 0xe01a4: 84, + 0xe01a5: 84, + 0xe01a6: 84, + 0xe01a7: 84, + 0xe01a8: 84, + 0xe01a9: 84, + 0xe01aa: 84, + 0xe01ab: 84, + 0xe01ac: 84, + 0xe01ad: 84, + 0xe01ae: 84, + 
0xe01af: 84, + 0xe01b0: 84, + 0xe01b1: 84, + 0xe01b2: 84, + 0xe01b3: 84, + 0xe01b4: 84, + 0xe01b5: 84, + 0xe01b6: 84, + 0xe01b7: 84, + 0xe01b8: 84, + 0xe01b9: 84, + 0xe01ba: 84, + 0xe01bb: 84, + 0xe01bc: 84, + 0xe01bd: 84, + 0xe01be: 84, + 0xe01bf: 84, + 0xe01c0: 84, + 0xe01c1: 84, + 0xe01c2: 84, + 0xe01c3: 84, + 0xe01c4: 84, + 0xe01c5: 84, + 0xe01c6: 84, + 0xe01c7: 84, + 0xe01c8: 84, + 0xe01c9: 84, + 0xe01ca: 84, + 0xe01cb: 84, + 0xe01cc: 84, + 0xe01cd: 84, + 0xe01ce: 84, + 0xe01cf: 84, + 0xe01d0: 84, + 0xe01d1: 84, + 0xe01d2: 84, + 0xe01d3: 84, + 0xe01d4: 84, + 0xe01d5: 84, + 0xe01d6: 84, + 0xe01d7: 84, + 0xe01d8: 84, + 0xe01d9: 84, + 0xe01da: 84, + 0xe01db: 84, + 0xe01dc: 84, + 0xe01dd: 84, + 0xe01de: 84, + 0xe01df: 84, + 0xe01e0: 84, + 0xe01e1: 84, + 0xe01e2: 84, + 0xe01e3: 84, + 0xe01e4: 84, + 0xe01e5: 84, + 0xe01e6: 84, + 0xe01e7: 84, + 0xe01e8: 84, + 0xe01e9: 84, + 0xe01ea: 84, + 0xe01eb: 84, + 0xe01ec: 84, + 0xe01ed: 84, + 0xe01ee: 84, + 0xe01ef: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 
0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 
0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3c00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc5d00000c5e, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcdd00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf4, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ecf, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x170000001716, + 0x171f00001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001acf, + 0x1b0000001b4d, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 
0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c60, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 
0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab69, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, 
+ 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10efd00010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 0x10fe000010ff7, + 0x1100000011047, + 0x1106600011076, + 0x1107f000110bb, + 0x110c2000110c3, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x1174000011747, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ab000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11f0000011f11, + 0x11f1200011f3b, + 0x11f3e00011f43, + 0x11f5000011f5a, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x12f9000012ff1, + 0x1300000013430, + 0x1344000013456, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 
0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1b1550001b156, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1df000001df1f, + 0x1df250001df2b, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e08f0001e090, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e4d00001e4fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 0x1e9500001e95a, + 0x200000002a6e0, + 0x2a7000002b73a, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2ebf00002ee5e, + 0x300000003134b, + 0x31350000323b0, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/myenv/lib/python3.12/site-packages/idna/intranges.py b/myenv/lib/python3.12/site-packages/idna/intranges.py new file mode 100644 index 0000000..6a43b04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/intranges.py @@ -0,0 +1,54 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
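+
+ For example, intranges_from_list([1, 2, 3, 10]) collapses the input into
+ two runs, conceptually (1, 4) and (10, 11), and returns the encoded pair
+ ((1 << 32) | 4, (10 << 32) | 11); intranges_contain(2, ...) on that
+ result is True, while intranges_contain(5, ...) is False.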
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/myenv/lib/python3.12/site-packages/idna/package_data.py b/myenv/lib/python3.12/site-packages/idna/package_data.py new file mode 100644 index 0000000..79aa47c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '3.8' + diff --git a/myenv/lib/python3.12/site-packages/idna/py.typed b/myenv/lib/python3.12/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/idna/uts46data.py b/myenv/lib/python3.12/site-packages/idna/uts46data.py new file mode 100644 index 0000000..6a1eddb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/idna/uts46data.py @@ -0,0 +1,8598 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = '15.1.0' +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 
'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + (0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), 
+ (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), + (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, 'M', 'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + 
(0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), + (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + (0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 
'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + (0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), + (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + 
(0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61D, 'V'), + ] + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return 
[ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + ] + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + ] + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, 'X'), + (0xC3C, 'V'), + 
(0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF4, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECF, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, 
'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + 
(0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 
'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ß'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 
'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 
'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 
'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + 
(0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 
'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', 
'巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + 
(0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', 
'(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + 
(0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + 
(0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 
'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + 
(0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 
'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + 
(0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', 
'倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + 
(0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 
'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC5B, 'M', 
'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + 
(0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + 
(0xFDAA, 'M', 'شحي'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + 
(0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + 
(0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + 
(0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), 
+ (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10EFD, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C3, 
'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x11242, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + 
(0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11F00, 'V'), + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F5A, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', 
'𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, 
str]]]: + return [ + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 
'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5BB, 
'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + 
(0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + ] + +def _seg_68() -> List[Union[Tuple[int, 
str], Tuple[int, str, str]]]: + return [ + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), 
+ (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 'ж'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E037, 
'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + (0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + 
(0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 
'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + 
(0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DC, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F777, 'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA89, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA90, 'V'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), + (0x1FAC6, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), + (0x1FAE0, 'V'), + (0x1FAE9, 'X'), + (0x1FAF0, 'V'), + (0x1FAF9, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B73A, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2EBF0, 'V'), + (0x2EE5E, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + 
(0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + 
(0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 
'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + ] + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = 
tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/LICENSE b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/LICENSE new file mode 100644 index 0000000..a396411 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 InfluxData, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/METADATA b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/METADATA new file mode 100644 index 0000000..7f168ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/METADATA @@ -0,0 +1,1592 @@ +Metadata-Version: 2.1 +Name: influxdb-client +Version: 1.46.0 +Summary: InfluxDB 2.0 Python client library +Home-page: https://github.com/influxdata/influxdb-client-python +Keywords: InfluxDB,InfluxDB Python Client +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: reactivex >=4.0.4 +Requires-Dist: certifi >=14.05.14 +Requires-Dist: python-dateutil >=2.5.3 +Requires-Dist: setuptools >=21.0.0 +Requires-Dist: urllib3 >=1.26.0 +Provides-Extra: async +Requires-Dist: aiohttp >=3.8.1 ; extra == 'async' +Requires-Dist: aiocsv >=1.2.2 ; extra == 'async' +Provides-Extra: ciso +Requires-Dist: ciso8601 >=2.1.1 ; extra == 'ciso' +Provides-Extra: extra +Requires-Dist: pandas >=1.0.0 ; extra == 'extra' +Requires-Dist: numpy ; extra == 'extra' +Provides-Extra: test +Requires-Dist: flake8 >=5.0.3 ; extra == 'test' +Requires-Dist: coverage >=4.0.3 ; extra == 'test' +Requires-Dist: nose >=1.3.7 ; extra == 'test' +Requires-Dist: pluggy >=0.3.1 ; extra == 'test' +Requires-Dist: py >=1.4.31 ; extra == 'test' +Requires-Dist: randomize >=0.13 ; extra == 'test' +Requires-Dist: pytest >=5.0.0 ; extra == 'test' +Requires-Dist: pytest-cov >=3.0.0 ; extra == 'test' +Requires-Dist: pytest-timeout >=2.1.0 ; extra == 'test' +Requires-Dist: httpretty ==1.0.5 ; extra == 'test' +Requires-Dist: psutil >=5.6.3 ; extra == 'test' +Requires-Dist: aioresponses >=0.7.3 ; extra == 'test' +Requires-Dist: sphinx ==1.8.5 ; extra == 'test' +Requires-Dist: sphinx-rtd-theme ; extra == 'test' +Requires-Dist: jinja2 >=3.1.4 ; extra == 'test' + +# influxdb-client-python + + + +[![CircleCI](https://circleci.com/gh/influxdata/influxdb-client-python.svg?style=svg)](https://circleci.com/gh/influxdata/influxdb-client-python) +[![codecov](https://codecov.io/gh/influxdata/influxdb-client-python/branch/master/graph/badge.svg)](https://codecov.io/gh/influxdata/influxdb-client-python) +[![CI status](https://img.shields.io/circleci/project/github/influxdata/influxdb-client-python/master.svg)](https://circleci.com/gh/influxdata/influxdb-client-python) +[![PyPI package](https://img.shields.io/pypi/v/influxdb-client.svg)](https://pypi.org/project/influxdb-client/) +[![Anaconda.org package](https://anaconda.org/influxdata/influxdb_client/badges/version.svg)](https://anaconda.org/influxdata/influxdb_client) +[![Supported Python versions](https://img.shields.io/pypi/pyversions/influxdb-client.svg)](https://pypi.python.org/pypi/influxdb-client) +[![Documentation 
status](https://readthedocs.org/projects/influxdb-client/badge/?version=stable)](https://influxdb-client.readthedocs.io/en/stable/)
+[![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://www.influxdata.com/slack)
+
+This repository contains the Python client library for use with InfluxDB 2.x and Flux. InfluxDB 3.x users should instead use the lightweight [v3 client library](https://github.com/InfluxCommunity/influxdb3-python).
+InfluxDB 1.x users should use the [v1 client library](https://github.com/influxdata/influxdb-python).
+
+For ease of migration and a consistent query and write experience, v2 users should consider using InfluxQL and the [v1 client library](https://github.com/influxdata/influxdb-python).
+
+The API of **influxdb-client-python** is not backwards-compatible with the old one - **influxdb-python**.
+
+## Documentation
+
+This section contains links to the client library documentation.
+
+- [Product documentation](https://docs.influxdata.com/influxdb/v2.0/tools/client-libraries/), [Getting Started](#getting-started)
+- [Examples](https://github.com/influxdata/influxdb-client-python/tree/master/examples)
+- [API Reference](https://influxdb-client.readthedocs.io/en/stable/api.html)
+- [Changelog](https://github.com/influxdata/influxdb-client-python/blob/master/CHANGELOG.md)
+
+## InfluxDB 2.0 client features
+
+- Querying data
+  - using the Flux language
+  - into csv, raw data, [flux_table](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L33) structure, [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+  - [How to query](#queries)
+- Writing data using
+  - [Line Protocol](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol)
+  - [Data Point](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/write/point.py#L47)
+  - [RxPY](https://rxpy.readthedocs.io/en/latest/) Observable
+  - [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+  - [How to write](#writes)
+- [InfluxDB 2.0 API](https://github.com/influxdata/influxdb/blob/master/http/swagger.yml) client for management
+  - the client is generated from the [swagger](https://github.com/influxdata/influxdb/blob/master/http/swagger.yml) by using the [openapi-generator](https://github.com/OpenAPITools/openapi-generator)
+  - organizations & users management
+  - buckets management
+  - tasks management
+  - authorizations
+  - health check
+  - ...
+- [InfluxDB 1.8 API compatibility](#influxdb-18-api-compatibility)
+- Examples
+  - [Connect to InfluxDB Cloud](#connect-to-influxdb-cloud)
+  - [How to efficiently import a large dataset](#how-to-efficiently-import-large-dataset)
+  - [Efficiently write data from an IoT sensor](#efficiency-write-data-from-iot-sensor)
+  - [How to use Jupyter + Pandas + InfluxDB 2](#how-to-use-jupyter--pandas--influxdb-2)
+- [Advanced Usage](#advanced-usage)
+  - [Gzip support](#gzip-support)
+  - [Proxy configuration](#proxy-configuration)
+  - [Nanosecond precision](#nanosecond-precision)
+  - [Delete data](#delete-data)
+  - [Handling Errors](#handling-errors)
+  - [Logging](#logging)
+
+## Installation
+
+The InfluxDB Python library uses [RxPY](https://github.com/ReactiveX/RxPY) - The Reactive Extensions for Python (RxPY).
+
+**Python 3.7** or later is required.
+
+:warning:
`ciso8601` is much faster than the built-in Python datetime. Since it's written as a `C` module, the best way is to build it from source:
+
+**Windows**:
+
+You have to install [Visual C++ Build Tools 2015](http://go.microsoft.com/fwlink/?LinkId=691126&fixForIE=.exe) to build `ciso8601` via `pip`.
+
+**conda**:
+
+Install from sources: `conda install -c conda-forge/label/cf202003 ciso8601`.
+
+### pip install
+
+The python package is hosted on [PyPI](https://pypi.org/project/influxdb-client/), you can install the latest version directly:
+
+``` sh
+pip install 'influxdb-client[ciso]'
+```
+
+Then import the package:
+
+``` python
+import influxdb_client
+```
+
+If your application uses async/await in Python you can install with the `async` extra:
+
+``` sh
+$ pip install influxdb-client[async]
+```
+
+For more info see [How to use Asyncio](#how-to-use-asyncio).
+
+### Setuptools
+
+Install via [Setuptools](http://pypi.python.org/pypi/setuptools).
+
+``` sh
+python setup.py install --user
+```
+
+(or `sudo python setup.py install` to install the package for all users)
+
+## Getting Started
+
+Please follow the [Installation](#installation) section and then run the following:
+
+``` python
+from influxdb_client import InfluxDBClient, Point
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+bucket = "my-bucket"
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
+
+write_api = client.write_api(write_options=SYNCHRONOUS)
+query_api = client.query_api()
+
+p = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)
+
+write_api.write(bucket=bucket, record=p)
+
+## using Table structure
+tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)')
+
+for table in tables:
+    print(table)
+    for row in table.records:
+        print(row.values)
+
+
+## using csv library
+csv_result = query_api.query_csv('from(bucket:"my-bucket") |> range(start: -10m)')
+val_count = 0
+for row in csv_result:
+    for cell in row:
+        val_count += 1
+```
+
+## Client configuration
+
+### Via File
+
+A client can be configured via an `*.ini` file in the `influx2` segment.
+
+The following options are supported:
+
+- `url` - the url to connect to InfluxDB
+- `org` - default destination organization for writes and queries
+- `token` - the token to use for the authorization
+- `timeout` - socket timeout in ms (default value is 10000)
+- `verify_ssl` - set this to false to skip verifying the SSL certificate when calling the API from an https server
+- `ssl_ca_cert` - set this to customize the certificate file to verify the peer
+- `cert_file` - path to the certificate that will be used for mTLS authentication
+- `cert_key_file` - path to the file containing the private key for the mTLS certificate
+- `cert_key_password` - string or function which returns the password for decrypting the mTLS private key
+- `connection_pool_maxsize` - set the number of connections to save that can be reused by urllib3
+- `auth_basic` - enable http basic authentication when talking to an InfluxDB 1.8.x that has no authentication but is accessed via a reverse proxy with basic authentication (defaults to false)
+- `profilers` - set the list of enabled [Flux profilers](https://docs.influxdata.com/influxdb/v2.0/reference/flux/stdlib/profiler/)
+
+``` python
+self.client = InfluxDBClient.from_config_file("config.ini")
+```
+
+``` ini
+[influx2]
+url=http://localhost:8086
+org=my-org
+token=my-token
+timeout=6000
+verify_ssl=False
+```
+
+### Via Environment Properties
+
+A client can be configured via environment properties.
+
+Supported properties are:
+
+- `INFLUXDB_V2_URL` - the url to connect to InfluxDB
+- `INFLUXDB_V2_ORG` - default destination organization for writes and queries
+- `INFLUXDB_V2_TOKEN` - the token to use for the authorization
+- `INFLUXDB_V2_TIMEOUT` - socket timeout in ms (default value is 10000)
+- `INFLUXDB_V2_VERIFY_SSL` - set this to false to skip verifying the SSL certificate when calling the API from an https server
+- `INFLUXDB_V2_SSL_CA_CERT` - set this to customize the certificate file to verify the peer
+- `INFLUXDB_V2_CERT_FILE` - path to the certificate that will be used for mTLS authentication
+- `INFLUXDB_V2_CERT_KEY_FILE` - path to the file containing the private key for the mTLS certificate
+- `INFLUXDB_V2_CERT_KEY_PASSWORD` - string or function which returns the password for decrypting the mTLS private key
+- `INFLUXDB_V2_CONNECTION_POOL_MAXSIZE` - set the number of connections to save that can be reused by urllib3
+- `INFLUXDB_V2_AUTH_BASIC` - enable http basic authentication when talking to an InfluxDB 1.8.x that has no authentication but is accessed via a reverse proxy with basic authentication (defaults to false)
+- `INFLUXDB_V2_PROFILERS` - set the list of enabled [Flux profilers](https://docs.influxdata.com/influxdb/v2.0/reference/flux/stdlib/profiler/)
+
+``` python
+self.client = InfluxDBClient.from_env_properties()
+```
+
+### Profile query
+
+The [Flux Profiler package](https://docs.influxdata.com/influxdb/v2.0/reference/flux/stdlib/profiler/) provides performance profiling tools for Flux queries and operations.
+
+You can enable printing profiler information of the Flux query in the client
+library by:
+
+- setting `QueryOptions.profilers` in `QueryApi`,
+- setting the `INFLUXDB_V2_PROFILERS` environment variable,
+- setting the `profilers` option in the configuration file.
+
+When the profiler is enabled, the result of the Flux query contains additional "profiler/" tables. To keep the behaviour consistent whether the profiler is enabled or disabled, `FluxCSVParser` excludes "profiler/" measurements from the result.
+
+Example of how to enable profilers using the API:
+
+``` python
+from influxdb_client.client.query_api import QueryOptions
+
+# `client` is an existing InfluxDBClient instance
+q = '''
+    from(bucket: stringParam)
+      |> range(start: -5m, stop: now())
+      |> filter(fn: (r) => r._measurement == "mem")
+      |> filter(fn: (r) => r._field == "available" or r._field == "free" or r._field == "used")
+      |> aggregateWindow(every: 1m, fn: mean)
+      |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+'''
+p = {
+    "stringParam": "my-bucket",
+}
+
+query_api = client.query_api(query_options=QueryOptions(profilers=["query", "operator"]))
+csv_result = query_api.query(query=q, params=p)
+```
+
+Example of a profiler output:
+
+``` text
+===============
+Profiler: query
+===============
+
+from(bucket: stringParam)
+  |> range(start: -5m, stop: now())
+  |> filter(fn: (r) => r._measurement == "mem")
+  |> filter(fn: (r) => r._field == "available" or r._field == "free" or r._field == "used")
+  |> aggregateWindow(every: 1m, fn: mean)
+  |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+
+========================
+Profiler: profiler/query
+========================
+result           : _profiler
+table            : 0
+_measurement     : profiler/query
+TotalDuration    : 8924700
+CompileDuration  : 350900
+QueueDuration    : 33800
+PlanDuration     : 0
+RequeueDuration  : 0
+ExecuteDuration  : 8486500
+Concurrency      : 0
+MaxAllocated     : 2072
+TotalAllocated   : 0
+flux/query-plan  :
+
+digraph {
+  ReadWindowAggregateByTime11
+  // every = 1m, aggregates = [mean], createEmpty = true, timeColumn = "_stop"
+  pivot8
+  generated_yield
+
+  ReadWindowAggregateByTime11 -> pivot8
+  pivot8 -> generated_yield
+}
+
+
+influxdb/scanned-bytes: 0
+influxdb/scanned-values: 0
+
+===========================
+Profiler: profiler/operator
+===========================
+result           : _profiler
+table            : 1
+_measurement     : profiler/operator
+Type             : *universe.pivotTransformation
+Label            : pivot8
+Count            : 3
+MinDuration      : 32600
+MaxDuration      : 126200
+DurationSum      : 193400
+MeanDuration     : 64466.666666666664
+
+===========================
+Profiler: profiler/operator
+===========================
+result           : _profiler
+table            : 1
+_measurement     : profiler/operator
+Type             : *influxdb.readWindowAggregateSource
+Label            : ReadWindowAggregateByTime11
+Count            : 1
+MinDuration      : 940500
+MaxDuration      : 940500
+DurationSum      : 940500
+MeanDuration     : 940500.0
+```
+
+You can also use a callback function to get the profiler output. The value passed to this callback is of type `FluxRecord`.
+
+Example of how to use profilers with a callback:
+
+``` python
+class ProfilersCallback(object):
+    def __init__(self):
+        self.records = []
+
+    def __call__(self, flux_record):
+        self.records.append(flux_record.values)
+
+
+callback = ProfilersCallback()
+
+query_api = client.query_api(query_options=QueryOptions(profilers=["query", "operator"], profiler_callback=callback))
+tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)')
+
+for profiler in callback.records:
+    print(f'Custom processing of profiler result: {profiler}')
+```
+
+Example output of this callback:
+
+``` text
+Custom processing of profiler result: {'result': '_profiler', 'table': 0, '_measurement': 'profiler/query', 'TotalDuration': 18843792, 'CompileDuration': 1078666, 'QueueDuration': 93375, 'PlanDuration': 0, 'RequeueDuration': 0, 'ExecuteDuration': 17371000, 'Concurrency': 0, 'MaxAllocated': 448, 'TotalAllocated': 0, 'RuntimeErrors': None, 'flux/query-plan': 'digraph {\r\n  ReadRange2\r\n  generated_yield\r\n\r\n  ReadRange2 -> generated_yield\r\n}\r\n\r\n', 'influxdb/scanned-bytes': 0, 'influxdb/scanned-values': 0}
+Custom processing of profiler result: {'result': '_profiler', 'table': 1, '_measurement': 'profiler/operator', 'Type': '*influxdb.readFilterSource', 'Label': 'ReadRange2', 'Count': 1, 'MinDuration': 3274084, 'MaxDuration': 3274084, 'DurationSum': 3274084, 'MeanDuration': 3274084.0}
+```
+
+## How to use
+
+### Writes
+
+The [WriteApi](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/write_api.py) supports synchronous, asynchronous and batching writes into InfluxDB 2.0. The data should be passed as an [InfluxDB Line Protocol](https://docs.influxdata.com/influxdb/latest/write_protocols/line_protocol_tutorial/), [Data Point](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/write/point.py) or Observable stream.
+
+:warning:
+
+> The `WriteApi` in batching mode (the default mode) is supposed to run as a
+singleton. To flush all your data you should wrap the execution in a
+`with client.write_api(...) as write_api:` statement or call
+`write_api.close()` at the end of your script.
+
+*The default instance of WriteApi uses batching.*
+
+#### The data can be written as
+
+1. `string` or `bytes` formatted as InfluxDB's line protocol
+2. [Data Point](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/write/point.py#L16) structure
+3. Dictionary style mapping with keys: `measurement`, `tags`, `fields` and `time`, or a custom structure
+4. [NamedTuple](https://docs.python.org/3/library/collections.html#collections.namedtuple)
+5. [Data Classes](https://docs.python.org/3/library/dataclasses.html)
+6. [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+7. List of the above items
+8. A `batching` type of write also supports an `Observable` that produces one of the above items
+
+You can find write examples at GitHub: [influxdb-client-python/examples](https://github.com/influxdata/influxdb-client-python/tree/master/examples#writes).
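+
+Items 4 and 5 above (`NamedTuple` and data classes) do not get a dedicated example in this README, so here is a minimal
+sketch. It assumes the `record_measurement_name`, `record_tag_keys` and `record_field_keys` keyword arguments of
+`WriteApi.write`, as used by the structured-data examples in the repository; the `Temperature` and `Humidity` types are
+illustrative only:
+
+``` python
+from collections import namedtuple
+from dataclasses import dataclass
+
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+# Illustrative types - any NamedTuple or dataclass with tag/field attributes works
+Temperature = namedtuple('Temperature', ['location', 'value'])
+
+
+@dataclass
+class Humidity:
+    location: str
+    value: float
+
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+    write_api = client.write_api(write_options=SYNCHRONOUS)
+
+    # Tell the client which attributes are tags and which are fields
+    write_api.write(bucket="my-bucket",
+                    record=Temperature("Prague", 25.3),
+                    record_measurement_name="temperature",
+                    record_tag_keys=["location"],
+                    record_field_keys=["value"])
+
+    write_api.write(bucket="my-bucket",
+                    record=Humidity("Prague", 40.5),
+                    record_measurement_name="humidity",
+                    record_tag_keys=["location"],
+                    record_field_keys=["value"])
+```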
+
+#### Batching
+
+Batching is configurable via `write_options`:
+
+| Property             | Description                                                                        | Default Value |
+|----------------------|------------------------------------------------------------------------------------|---------------|
+| **batch_size**       | the number of data points to collect in a batch                                     | `1000`        |
+| **flush_interval**   | the number of milliseconds before the batch is written                              | `1000`        |
+| **jitter_interval**  | the number of milliseconds by which to randomly increase the batch flush interval   | `0`           |
+| **retry_interval**   | the number of milliseconds to wait before retrying the first unsuccessful write. The next retry delay is computed using exponential random backoff. The retry interval is used when the InfluxDB server does not specify a "Retry-After" header. | `5000` |
+| **max_retry_time**   | maximum total retry timeout in milliseconds                                          | `180_000`     |
+| **max_retries**      | the maximum number of retries when a write fails                                     | `5`           |
+| **max_retry_delay**  | the maximum delay between each retry attempt in milliseconds                         | `125_000`     |
+| **max_close_wait**   | the maximum amount of time to wait for batches to flush when `.close()` is called    | `300_000`     |
+| **exponential_base** | the base for the exponential retry delay; the next delay is computed using random exponential backoff as a random value within the interval `retry_interval * exponential_base^(attempts-1)` and `retry_interval * exponential_base^(attempts)`.
 Example for `retry_interval=5_000, exponential_base=2, max_retry_delay=125_000, total=5`: retry delays are randomly distributed values within the ranges `[5_000-10_000, 10_000-20_000, 20_000-40_000, 40_000-80_000, 80_000-125_000]` | `2` |
+
+``` python
+from datetime import datetime, timedelta, timezone
+
+import pandas as pd
+import reactivex as rx
+from reactivex import operators as ops
+
+from influxdb_client import InfluxDBClient, Point, WriteOptions
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as _client:
+
+    with _client.write_api(write_options=WriteOptions(batch_size=500,
+                                                      flush_interval=10_000,
+                                                      jitter_interval=2_000,
+                                                      retry_interval=5_000,
+                                                      max_retries=5,
+                                                      max_retry_delay=30_000,
+                                                      max_close_wait=300_000,
+                                                      exponential_base=2)) as _write_client:
+
+        """
+        Write Line Protocol formatted as string
+        """
+        _write_client.write("my-bucket", "my-org", "h2o_feet,location=coyote_creek water_level=1.0 1")
+        _write_client.write("my-bucket", "my-org", ["h2o_feet,location=coyote_creek water_level=2.0 2",
+                                                    "h2o_feet,location=coyote_creek water_level=3.0 3"])
+
+        """
+        Write Line Protocol formatted as byte array
+        """
+        _write_client.write("my-bucket", "my-org", "h2o_feet,location=coyote_creek water_level=1.0 1".encode())
+        _write_client.write("my-bucket", "my-org", ["h2o_feet,location=coyote_creek water_level=2.0 2".encode(),
+                                                    "h2o_feet,location=coyote_creek water_level=3.0 3".encode()])
+
+        """
+        Write Dictionary-style object
+        """
+        _write_client.write("my-bucket", "my-org", {"measurement": "h2o_feet", "tags": {"location": "coyote_creek"},
+                                                    "fields": {"water_level": 1.0}, "time": 1})
+        _write_client.write("my-bucket", "my-org", [{"measurement": "h2o_feet", "tags": {"location": "coyote_creek"},
+                                                     "fields": {"water_level": 2.0}, "time": 2},
+                                                    {"measurement": "h2o_feet", "tags": {"location": "coyote_creek"},
+                                                     "fields": {"water_level": 3.0}, "time": 3}])
+
+        """
+        Write Data Point
+        """
+        _write_client.write("my-bucket", "my-org",
+                            Point("h2o_feet").tag("location", "coyote_creek").field("water_level", 4.0).time(4))
+        _write_client.write("my-bucket", "my-org",
+                            [Point("h2o_feet").tag("location", "coyote_creek").field("water_level", 5.0).time(5),
+                             Point("h2o_feet").tag("location", "coyote_creek").field("water_level", 6.0).time(6)])
+
+        """
+        Write Observable stream
+        """
+        _data = rx \
+            .range(7, 11) \
+            .pipe(ops.map(lambda i: "h2o_feet,location=coyote_creek water_level={0}.0 {0}".format(i)))
+
+        _write_client.write("my-bucket", "my-org", _data)
+
+        """
+        Write Pandas DataFrame
+        """
+        _now = datetime.now(tz=timezone.utc)
+        _data_frame = pd.DataFrame(data=[["coyote_creek", 1.0], ["coyote_creek", 2.0]],
+                                   index=[_now, _now + timedelta(hours=1)],
+                                   columns=["location", "water_level"])
+
+        _write_client.write("my-bucket", "my-org", record=_data_frame, data_frame_measurement_name='h2o_feet',
+                            data_frame_tag_columns=['location'])
+```
+
+#### Default Tags
+
+Sometimes it is useful to store the same information in every measurement, e.g. `hostname`, `location`, `customer`. The client is able to use a static value or an env property as a tag value.
+
+The expressions:
+
+- `California Miner` - static value
+- `${env.hostname}` - environment property
+
+##### Via API
+
+``` python
+point_settings = PointSettings()
+point_settings.add_default_tag("id", "132-987-655")
+point_settings.add_default_tag("customer", "California Miner")
+point_settings.add_default_tag("data_center", "${env.data_center}")
+
+self.write_client = self.client.write_api(write_options=SYNCHRONOUS, point_settings=point_settings)
+```
+
+``` python
+self.write_client = self.client.write_api(write_options=SYNCHRONOUS,
+                                          point_settings=PointSettings(**{"id": "132-987-655",
+                                                                          "customer": "California Miner"}))
+```
+
+##### Via Configuration file
+
+In an [ini](https://docs.python.org/3/library/configparser.html) configuration file you can specify default tags in the `tags` segment.
+
+``` python
+self.client = InfluxDBClient.from_config_file("config.ini")
+```
+
+``` ini
+[influx2]
+url=http://localhost:8086
+org=my-org
+token=my-token
+timeout=6000
+
+[tags]
+id = 132-987-655
+customer = California Miner
+data_center = ${env.data_center}
+```
+
+You can also use a [TOML](https://toml.io/en/) or a [JSON](https://www.json.org/json-en.html) format for the configuration file.
+
+##### Via Environment Properties
+
+You can specify default tags via environment properties with the prefix `INFLUXDB_V2_TAG_`.
+
+Examples:
+
+- `INFLUXDB_V2_TAG_ID`
+- `INFLUXDB_V2_TAG_HOSTNAME`
+
+``` python
+self.client = InfluxDBClient.from_env_properties()
+```
+
+#### Synchronous client
+
+Data is written in a synchronous HTTP request.
+
+``` python
+from influxdb_client import InfluxDBClient, Point
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
+write_api = client.write_api(write_options=SYNCHRONOUS)
+
+_point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)
+_point2 = Point("my_measurement").tag("location", "New York").field("temperature", 24.3)
+
+write_api.write(bucket="my-bucket", record=[_point1, _point2])
+
+client.close()
+```
+
+### Queries
+
+The result retrieved by [QueryApi](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/query_api.py) can be formatted as:
+
+1. Flux data structure: [FluxTable](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5), [FluxColumn](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L22) and [FluxRecord](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L31)
+2. `influxdb_client.client.flux_table.CSVIterator` which will iterate over CSV lines
+3. Raw unprocessed results as a `str` iterator
+4. 
[Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+
+The API also supports streaming `FluxRecord` via [query_stream](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/query_api.py#L77), see the example below:
+
+``` python
+import datetime
+
+from influxdb_client import InfluxDBClient, Point, Dialect
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
+
+write_api = client.write_api(write_options=SYNCHRONOUS)
+query_api = client.query_api()
+
+"""
+Prepare data
+"""
+
+_point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)
+_point2 = Point("my_measurement").tag("location", "New York").field("temperature", 24.3)
+
+write_api.write(bucket="my-bucket", record=[_point1, _point2])
+
+"""
+Query: using Table structure
+"""
+tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)')
+
+for table in tables:
+    print(table)
+    for record in table.records:
+        print(record.values)
+
+print()
+print()
+
+"""
+Query: using Bind parameters
+"""
+
+p = {"_start": datetime.timedelta(hours=-1),
+     "_location": "Prague",
+     "_desc": True,
+     "_floatParam": 25.1,
+     "_every": datetime.timedelta(minutes=5)
+     }
+
+tables = query_api.query('''
+    from(bucket:"my-bucket") |> range(start: _start)
+        |> filter(fn: (r) => r["_measurement"] == "my_measurement")
+        |> filter(fn: (r) => r["_field"] == "temperature")
+        |> filter(fn: (r) => r["location"] == _location and r["_value"] > _floatParam)
+        |> aggregateWindow(every: _every, fn: mean, createEmpty: true)
+        |> sort(columns: ["_time"], desc: _desc)
+''', params=p)
+
+for table in tables:
+    print(table)
+    for record in table.records:
+        print(str(record["_time"]) + " - " + record["location"] + ": " + str(record["_value"]))
+
+print()
+print()
+
+"""
+Query: using Stream
+"""
+records = query_api.query_stream('from(bucket:"my-bucket") |> range(start: -10m)')
+
+for record in records:
+    print(f'Temperature in {record["location"]} is {record["_value"]}')
+
+"""
+Interrupt a stream after retrieving the required data
+"""
+large_stream = query_api.query_stream('from(bucket:"my-bucket") |> range(start: -100d)')
+for record in large_stream:
+    if record["location"] == "New York":
+        print(f'New York temperature: {record["_value"]}')
+        break
+
+large_stream.close()
+
+print()
+print()
+
+"""
+Query: using csv library
+"""
+csv_result = query_api.query_csv('from(bucket:"my-bucket") |> range(start: -10m)',
+                                 dialect=Dialect(header=False, delimiter=",", comment_prefix="#", annotations=[],
+                                                 date_time_format="RFC3339"))
+for csv_line in csv_result:
+    if not len(csv_line) == 0:
+        print(f'Temperature in {csv_line[9]} is {csv_line[6]}')
+
+"""
+Close client
+"""
+client.close()
+```
+
+#### Pandas DataFrame
+
+:warning:
+
+> For DataFrame querying you should install the Pandas dependency via `pip install 'influxdb-client[extra]'`.
+
+:warning:
+
+> Note that if a query returns more than one table, the client generates a `DataFrame` for each of them.
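+
+Because of that, the value returned by `query_data_frame` can be either a single `DataFrame` or a list of them. A minimal
+defensive sketch, assuming nothing beyond the `query_data_frame` call introduced in the full example below:
+
+``` python
+result = query_api.query_data_frame('from(bucket:"my-bucket") |> range(start: -10m)')
+
+# query_data_frame returns a single DataFrame, or a list of DataFrames
+# when the query produces more than one table
+for data_frame in (result if isinstance(result, list) else [result]):
+    print(data_frame.to_string())
+```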
+
+The `client` is able to retrieve data in [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html) format through `query_data_frame`:
+
+``` python
+from influxdb_client import InfluxDBClient, Point, Dialect
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
+
+write_api = client.write_api(write_options=SYNCHRONOUS)
+query_api = client.query_api()
+
+"""
+Prepare data
+"""
+
+_point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)
+_point2 = Point("my_measurement").tag("location", "New York").field("temperature", 24.3)
+
+write_api.write(bucket="my-bucket", record=[_point1, _point2])
+
+"""
+Query: using Pandas DataFrame
+"""
+data_frame = query_api.query_data_frame('from(bucket:"my-bucket") '
+                                        '|> range(start: -10m) '
+                                        '|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") '
+                                        '|> keep(columns: ["location", "temperature"])')
+print(data_frame.to_string())
+
+"""
+Close client
+"""
+client.close()
+```
+
+Output:
+
+``` text
+    result table  location  temperature
+0  _result     0  New York         24.3
+1  _result     1    Prague         25.3
+```
+
+### Examples
+
+#### How to efficiently import a large dataset
+
+The following example shows how to import a dataset of a dozen megabytes. If you would like to import gigabytes of data, then
+use our multiprocessing example: [import_data_set_multiprocessing.py](https://github.com/influxdata/influxdb-client-python/blob/master/examples/import_data_set_multiprocessing.py) to use the full capability of your hardware.
+
+- sources - [import_data_set.py](https://github.com/influxdata/influxdb-client-python/blob/master/examples/import_data_set.py)
+
+``` python
+"""
+Import VIX - CBOE Volatility Index - from "vix-daily.csv" file into InfluxDB 2.0
+
+https://datahub.io/core/finance-vix#data
+"""
+
+from collections import OrderedDict
+from csv import DictReader
+
+import reactivex as rx
+from reactivex import operators as ops
+
+from influxdb_client import InfluxDBClient, Point, WriteOptions
+
+
+def parse_row(row: OrderedDict):
+    """Parse row of CSV file into Point with structure:
+
+        financial-analysis,type=vix-daily close=18.47,high=19.82,low=18.28,open=19.82 1198195200000000000
+
+    CSV format:
+        Date,VIX Open,VIX High,VIX Low,VIX Close\n
+        2004-01-02,17.96,18.68,17.54,18.22\n
+        2004-01-05,18.45,18.49,17.44,17.49\n
+        2004-01-06,17.66,17.67,16.19,16.73\n
+        2004-01-07,16.72,16.75,15.5,15.5\n
+        2004-01-08,15.42,15.68,15.32,15.61\n
+        2004-01-09,16.15,16.88,15.57,16.75\n
+        ...
+
+    :param row: the row of CSV file
+    :return: Parsed csv row to [Point]
+    """
+
+    """
+    For better performance it is sometimes useful to create the line protocol directly and avoid unnecessary escaping overhead:
+    """
+    # from datetime import timezone
+    # import ciso8601
+    # from influxdb_client.client.write.point import EPOCH
+    #
+    # time = (ciso8601.parse_datetime(row["Date"]).replace(tzinfo=timezone.utc) - EPOCH).total_seconds() * 1e9
+    # return f"financial-analysis,type=vix-daily" \
+    #        f" close={float(row['VIX Close'])},high={float(row['VIX High'])},low={float(row['VIX Low'])},open={float(row['VIX Open'])} " \
+    #        f" {int(time)}"
+
+    return Point("financial-analysis") \
+        .tag("type", "vix-daily") \
+        .field("open", float(row['VIX Open'])) \
+        .field("high", float(row['VIX High'])) \
+        .field("low", float(row['VIX Low'])) \
+        .field("close", float(row['VIX Close'])) \
+        .time(row['Date'])
+
+
+"""
+Converts vix-daily.csv into a sequence of data points
+"""
+data = rx \
+    .from_iterable(DictReader(open('vix-daily.csv', 'r'))) \
+    .pipe(ops.map(lambda row: parse_row(row)))
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=True)
+
+"""
+Create client that writes data in batches with 50_000 items.
+"""
+write_api = client.write_api(write_options=WriteOptions(batch_size=50_000, flush_interval=10_000))
+
+"""
+Write data into InfluxDB
+"""
+write_api.write(bucket="my-bucket", record=data)
+write_api.close()
+
+"""
+Querying max value of CBOE Volatility Index
+"""
+query = 'from(bucket:"my-bucket")' \
+        ' |> range(start: 0, stop: now())' \
+        ' |> filter(fn: (r) => r._measurement == "financial-analysis")' \
+        ' |> max()'
+result = client.query_api().query(query=query)
+
+"""
+Processing results
+"""
+print()
+print("=== results ===")
+print()
+for table in result:
+    for record in table.records:
+        print('max {0:5} = {1}'.format(record.get_field(), record.get_value()))
+
+"""
+Close client
+"""
+client.close()
+```
+
+#### Efficiently write data from an IoT sensor
+
+- sources - [iot_sensor.py](https://github.com/influxdata/influxdb-client-python/blob/master/examples/iot_sensor.py)
+
+``` python
+"""
+Efficiently write data from an IoT sensor - write the changed temperature every minute
+"""
+import atexit
+import platform
+from datetime import timedelta
+
+import psutil as psutil
+import reactivex as rx
+from reactivex import operators as ops
+
+from influxdb_client import InfluxDBClient, WriteApi, WriteOptions
+
+
+def on_exit(db_client: InfluxDBClient, write_api: WriteApi):
+    """Close clients after the script terminates.
+
+    :param db_client: InfluxDB client
+    :param write_api: WriteApi
+    :return: nothing
+    """
+    write_api.close()
+    db_client.close()
+
+
+def sensor_temperature():
+    """Read a CPU temperature. The [psutil] doesn't support macOS, so we use [sysctl].
+
+    :return: actual CPU temperature
+    """
+    os_name = platform.system()
+    if os_name == 'Darwin':
+        from subprocess import check_output
+        output = check_output(["sysctl", "machdep.xcpm.cpu_thermal_level"])
+        import re
+        return re.findall(r'\d+', str(output))[0]
+    else:
+        return psutil.sensors_temperatures()["coretemp"][0]
+
+
+def line_protocol(temperature):
+    """Create an InfluxDB line protocol with structure:
+
+        iot_sensor,hostname=mine_sensor_12,type=temperature value=68
+
+    :param temperature: the sensor temperature
+    :return: Line protocol to write into InfluxDB
+    """
+
+    import socket
+    return 'iot_sensor,hostname={},type=temperature value={}'.format(socket.gethostname(), temperature)
+
+
+"""
+Read the temperature every minute; distinct_until_changed - produce a value only if the temperature changes
+"""
+data = rx\
+    .interval(period=timedelta(seconds=60))\
+    .pipe(ops.map(lambda t: sensor_temperature()),
+          ops.distinct_until_changed(),
+          ops.map(lambda temperature: line_protocol(temperature)))
+
+_db_client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=True)
+
+"""
+Create client that writes data into InfluxDB
+"""
+_write_api = _db_client.write_api(write_options=WriteOptions(batch_size=1))
+_write_api.write(bucket="my-bucket", record=data)
+
+
+"""
+Called after the script terminates
+"""
+atexit.register(on_exit, _db_client, _write_api)
+
+input()
+```
+
+#### Connect to InfluxDB Cloud
+
+The following example demonstrates the simplest way to write and query data with InfluxDB Cloud.
+
+First, create an authentication token as described [here](https://v2.docs.influxdata.com/v2.0/security/tokens/create-token/).
+
+After that, configure the properties `influx_cloud_url`, `influx_cloud_token`, `bucket` and `org` in the `influx_cloud.py` example.
+
+The last step is to run the script via: `python3 influx_cloud.py`.
+
+- sources - [influx_cloud.py](https://github.com/influxdata/influxdb-client-python/blob/master/examples/influx_cloud.py)
+
+``` python
+"""
+Connect to InfluxDB 2.0 - write data and query them
+"""
+
+from datetime import datetime, timezone
+
+from influxdb_client import Point, InfluxDBClient
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+"""
+Configure credentials
+"""
+influx_cloud_url = 'https://us-west-2-1.aws.cloud2.influxdata.com'
+influx_cloud_token = '...'
+bucket = '...'
+org = '...'
+
+client = InfluxDBClient(url=influx_cloud_url, token=influx_cloud_token)
+try:
+    kind = 'temperature'
+    host = 'host1'
+    device = 'opt-123'
+
+    """
+    Write data by Point structure
+    """
+    point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3).time(time=datetime.now(tz=timezone.utc))
+
+    print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...')
+
+    write_api = client.write_api(write_options=SYNCHRONOUS)
+    write_api.write(bucket=bucket, org=org, record=point)
+
+    print()
+    print('success')
+    print()
+    print()
+
+    """
+    Query written data
+    """
+    query = f'from(bucket: "{bucket}") |> range(start: -1d) |> filter(fn: (r) => r._measurement == "{kind}")'
+    print(f'Querying from InfluxDB cloud: "{query}" ...')
+    print()
+
+    query_api = client.query_api()
+    tables = query_api.query(query=query, org=org)
+
+    for table in tables:
+        for row in table.records:
+            print(f'{row.values["_time"]}: host={row.values["host"]},device={row.values["device"]} '
+                  f'{row.values["_value"]} °C')
+
+    print()
+    print('success')
+
+except Exception as e:
+    print(e)
+finally:
+    client.close()
+```
+
+#### How to use Jupyter + Pandas + InfluxDB 2
+
+The first example shows how to use client capabilities to predict stock prices via [Keras](https://keras.io), [TensorFlow](https://www.tensorflow.org), [sklearn](https://scikit-learn.org/stable/):
+
+The example is taken from [Kaggle](https://www.kaggle.com/chaitanyacc4/predicting-stock-prices-of-apple-inc).
+
+- sources - [stock-predictions.ipynb](notebooks/stock-predictions.ipynb)
+
+![image](https://raw.githubusercontent.com/influxdata/influxdb-client-python/master/docs/images/stock-price-prediction.gif)
+
+Result:
+
+![image](https://raw.githubusercontent.com/influxdata/influxdb-client-python/master/docs/images/stock-price-prediction-results.png)
+
+The second example shows how to use client capabilities for real-time visualization via [hvPlot](https://hvplot.pyviz.org), [Streamz](https://streamz.readthedocs.io/en/latest/), [RxPY](https://rxpy.readthedocs.io/en/latest/):
+
+- sources - [realtime-stream.ipynb](notebooks/realtime-stream.ipynb)
+
+![image](https://raw.githubusercontent.com/influxdata/influxdb-client-python/master/docs/images/realtime-result.gif)
+
+#### Other examples
+
+You can find all examples at GitHub: [influxdb-client-python/examples](https://github.com/influxdata/influxdb-client-python/tree/master/examples#examples).
+
+## Advanced Usage
+
+### Gzip support
+
+`InfluxDBClient` does not enable gzip compression for HTTP requests by default. If you want to enable gzip to reduce the size of transferred data, you can call:
+
+``` python
+from influxdb_client import InfluxDBClient
+
+_db_client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", enable_gzip=True)
+```
+
+### Authenticate to InfluxDB
+
+`InfluxDBClient` supports three ways to authorize a connection:
+
+- _Token_
+- _Username & Password_
+- _HTTP Basic_
+
+#### Token
+
+Use the `token` to authenticate to the InfluxDB API. An _Authorization_ header is sent with your API requests; its value is the word _Token_ followed by a space and an InfluxDB API token. The word _Token_ is case-sensitive.
+
+``` python
+from influxdb_client import InfluxDBClient
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token") as client:
+    ...
+```
+
+:warning:
+
+> Note that this is the preferred way to authenticate to the InfluxDB API.
+
+#### Username & Password
+
+Authenticates via username and password credentials.
 If successful, it creates a new session for the user.
+
+``` python
+from influxdb_client import InfluxDBClient
+
+with InfluxDBClient(url="http://localhost:8086", username="my-user", password="my-password") as client:
+    ...
+```
+
+:warning:
+
+> The `username/password` auth is based on the HTTP "Basic" authentication. The authorization expires when the [time-to-live (TTL)](https://docs.influxdata.com/influxdb/latest/reference/config-options/#session-length) (default 60 minutes) is reached, and the client raises an `unauthorized` exception.
+
+#### HTTP Basic
+
+Use this to enable basic authentication when talking to an InfluxDB 1.8.x instance that does not have authentication enabled but is protected by a reverse proxy with basic authentication.
+
+``` python
+from influxdb_client import InfluxDBClient
+
+with InfluxDBClient(url="http://localhost:8086", auth_basic=True, token="my-proxy-secret") as client:
+    ...
+```
+
+:warning:
+
+> Don't use this when directly talking to InfluxDB 2.
+
+### Proxy configuration
+
+You can configure the client to tunnel requests through an HTTP proxy. The following proxy options are supported:
+
+- `proxy` - Set this to configure the http proxy to be used, e.g. `http://localhost:3128`
+- `proxy_headers` - A dictionary containing headers that will be sent to the proxy. Could be used for proxy authentication.
+
+``` python
+from influxdb_client import InfluxDBClient
+
+with InfluxDBClient(url="http://localhost:8086",
+                    token="my-token",
+                    org="my-org",
+                    proxy="http://localhost:3128") as client:
+    ...
+```
+
+If your proxy notifies the client of a permanent redirect (`HTTP 301`) to a **different host**, the client removes the `Authorization` header, because otherwise its contents would be sent to third parties, which is a security vulnerability.
+
+You can change this behaviour by:
+
+``` python
+from urllib3 import Retry
+
+Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset()
+Retry.DEFAULT.remove_headers_on_redirect = Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+```
+
+### Delete data
+
+The [delete_api.py](influxdb_client/client/delete_api.py) supports deleting [points](https://v2.docs.influxdata.com/v2.0/reference/glossary/#point) from an InfluxDB bucket.
+
+``` python
+from influxdb_client import InfluxDBClient
+
+client = InfluxDBClient(url="http://localhost:8086", token="my-token")
+
+delete_api = client.delete_api()
+
+"""
+Delete Data
+"""
+start = "1970-01-01T00:00:00Z"
+stop = "2021-02-01T00:00:00Z"
+delete_api.delete(start, stop, '_measurement="my_measurement"', bucket='my-bucket', org='my-org')
+
+"""
+Close client
+"""
+client.close()
+```
+
+### InfluxDB 1.8 API compatibility
+
+[InfluxDB 1.8.0 introduced forward compatibility APIs](https://docs.influxdata.com/influxdb/v1.8/tools/api/#influxdb-2-0-api-compatibility-endpoints) for InfluxDB 2.0. This allows you to easily move from InfluxDB 1.x to InfluxDB 2.0 Cloud or open source.
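+
+Before the endpoint table below, a minimal connection sketch. It assumes the DBRP mapping conventions from the client's
+[InfluxDB 1.8 example](examples/influxdb_18_example.py): the `token` is `'username:password'`, the `org` is ignored
+(use `'-'`), and the `bucket` is `'database/retention_policy'`:
+
+``` python
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+# token is "username:password", org is ignored by 1.8 ("-"),
+# bucket is "database/retention_policy"
+with InfluxDBClient(url="http://localhost:8086", token="my-user:my-password", org="-") as client:
+    write_api = client.write_api(write_options=SYNCHRONOUS)
+    write_api.write(bucket="telegraf/autogen", record="mem,host=host1 used_percent=21.5")
+```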
+
+The following forward compatible APIs are available:
+
+| API | Endpoint | Description |
+|-----|----------|-------------|
+| [query_api.py](influxdb_client/client/query_api.py) | [/api/v2/query](https://docs.influxdata.com/influxdb/v1.8/tools/api/#apiv2query-http-endpoint) | Query data in InfluxDB 1.8.0+ using the InfluxDB 2.0 API and [Flux](https://docs.influxdata.com/flux/latest/) (the endpoint must be enabled by the [flux-enabled option](https://docs.influxdata.com/influxdb/v1.8/administration/config/#flux-enabled-false)) |
+| [write_api.py](influxdb_client/client/write_api.py) | [/api/v2/write](https://docs.influxdata.com/influxdb/v1.8/tools/api/#apiv2write-http-endpoint) | Write data to InfluxDB 1.8.0+ using the InfluxDB 2.0 API |
+| [ping()](influxdb_client/client/influxdb_client.py) | [/ping](https://docs.influxdata.com/influxdb/v1.8/tools/api/#ping-http-endpoint) | Check the status of your InfluxDB instance |
+
+For detailed info see the [InfluxDB 1.8 example](examples/influxdb_18_example.py).
+
+### Handling Errors
+
+Errors happen, and it's important that your code is prepared for them. All client-related exceptions are derived from `InfluxDBError`.
+If an exception cannot be recovered from inside the client, it is returned to the application. These exceptions are left for the developer to handle.
+
+Almost all APIs directly return unrecoverable exceptions to be handled this way:
+
+``` python
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.exceptions import InfluxDBError
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+    try:
+        client.write_api(write_options=SYNCHRONOUS).write("my-bucket", record="mem,tag=a value=86")
+    except InfluxDBError as e:
+        if e.response.status == 401:
+            raise Exception("Insufficient write permissions to 'my-bucket'.") from e
+        raise
+```
+
+The only exception is the **batching** `WriteAPI` (for more info see [Batching](#batching)), where you need to register custom callbacks to handle batch events.
+This is because this API runs in the background in a separate thread, and it isn't possible to directly return underlying exceptions.
+
+``` python
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.exceptions import InfluxDBError
+
+
+class BatchingCallback(object):
+
+    def success(self, conf: (str, str, str), data: str):
+        print(f"Written batch: {conf}, data: {data}")
+
+    def error(self, conf: (str, str, str), data: str, exception: InfluxDBError):
+        print(f"Cannot write batch: {conf}, data: {data} due: {exception}")
+
+    def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError):
+        print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}")
+
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+    callback = BatchingCallback()
+    with client.write_api(success_callback=callback.success,
+                          error_callback=callback.error,
+                          retry_callback=callback.retry) as write_api:
+        pass
+```
+
+#### HTTP Retry Strategy
+
+By default, the client uses a retry strategy only for batching writes (for more info see [Batching](#batching)).
+For other HTTP requests there is no default retry strategy, but one can be configured via the `retries` parameter of `InfluxDBClient`.
+
+For more info about how to configure HTTP retries see the details in the [urllib3 documentation](https://urllib3.readthedocs.io/en/latest/reference/index.html?highlight=retry#urllib3.Retry).
+
+``` python
+from urllib3 import Retry
+
+from influxdb_client import InfluxDBClient
+
+retries = Retry(connect=5, read=2, redirect=5)
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", retries=retries)
+```
+
+### Nanosecond precision
+
+Python's [datetime](https://docs.python.org/3/library/datetime.html) doesn't support nanosecond precision, so during writes and queries the library ignores everything beyond microseconds.
+
+If you would like to use `datetime` with nanosecond precision you should use [pandas.Timestamp](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Timestamp.html#pandas.Timestamp), which is a replacement for the Python `datetime.datetime` object, and you should also set a proper `DateTimeHelper` on the client.
+
+- sources - [nanosecond_precision.py](https://github.com/influxdata/influxdb-client-python/blob/master/examples/nanosecond_precision.py)
+
+``` python
+from influxdb_client import Point, InfluxDBClient
+from influxdb_client.client.util.date_utils_pandas import PandasDateTimeHelper
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+"""
+Set the PandasDateTimeHelper which supports nanoseconds.
+"""
+import influxdb_client.client.util.date_utils as date_utils
+
+date_utils.date_helper = PandasDateTimeHelper()
+
+"""
+Prepare client.
+""" +client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") + +write_api = client.write_api(write_options=SYNCHRONOUS) +query_api = client.query_api() + +""" +Prepare data +""" + +point = Point("h2o_feet") \ + .field("water_level", 10) \ + .tag("location", "pacific") \ + .time('1996-02-25T21:20:00.001001231Z') + +print(f'Time serialized with nanosecond precision: {point.to_line_protocol()}') +print() + +write_api.write(bucket="my-bucket", record=point) + +""" +Query: using Stream +""" +query = ''' +from(bucket:"my-bucket") + |> range(start: 0, stop: now()) + |> filter(fn: (r) => r._measurement == "h2o_feet") +''' +records = query_api.query_stream(query) + +for record in records: + print(f'Temperature in {record["location"]} is {record["_value"]} at time: {record["_time"]}') + +""" +Close client +""" +client.close() +``` + + +### How to use Asyncio + + + +Starting from version 1.27.0 for Python 3.7+ the `influxdb-client` package supports `async/await` based on [asyncio](https://docs.python.org/3/library/asyncio.html), [aiohttp](https://docs.aiohttp.org) and [aiocsv](https://pypi.org/project/aiocsv/). +You can install `aiohttp` and `aiocsv` directly: + +> ``` bash +> $ python -m pip install influxdb-client aiohttp aiocsv +> ``` + +or use the `[async]` extra: + +> ``` bash +> $ python -m pip install influxdb-client[async] +> ``` + +:warning: + +> The `InfluxDBClientAsync` should be initialised inside `async coroutine` otherwise there can be unexpected behaviour. For more info see: [Why is creating a ClientSession outside an event loop dangerous?](https://docs.aiohttp.org/en/stable/faq.html#why-is-creating-a-clientsession-outside-of-an-event-loop-dangerous). + +#### Async APIs + +All async APIs are available via `influxdb_client.client.influxdb_client_async.InfluxDBClientAsync`. 
 The `async` version of the client supports the following asynchronous APIs:
+
+- `influxdb_client.client.write_api_async.WriteApiAsync`
+- `influxdb_client.client.query_api_async.QueryApiAsync`
+- `influxdb_client.client.delete_api_async.DeleteApiAsync`
+- Management services in `influxdb_client.service` support async
+  operation
+
+You can also check the readiness of InfluxDB via the `/ping` endpoint:
+
+> ``` python
+> import asyncio
+>
+> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+>
+>
+> async def main():
+>     async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+>         ready = await client.ping()
+>         print(f"InfluxDB: {ready}")
+>
+>
+> if __name__ == "__main__":
+>     asyncio.run(main())
+> ```
+
+#### Async Write API
+
+The `influxdb_client.client.write_api_async.WriteApiAsync` supports ingesting data as:
+
+- `string` or `bytes` formatted as InfluxDB's line protocol
+- [Data Point](https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/write/point.py#L16) structure
+- Dictionary style mapping with keys: `measurement`, `tags`, `fields` and `time`, or a custom structure
+- [NamedTuple](https://docs.python.org/3/library/collections.html#collections.namedtuple)
+- [Data Classes](https://docs.python.org/3/library/dataclasses.html)
+- [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+- List of the above items
+
+> ``` python
+> import asyncio
+>
+> from influxdb_client import Point
+> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+>
+>
+> async def main():
+>     async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+>
+>         write_api = client.write_api()
+>
+>         _point1 = Point("async_m").tag("location", "Prague").field("temperature", 25.3)
+>         _point2 = Point("async_m").tag("location", "New York").field("temperature", 24.3)
+>
+>         successfully = await write_api.write(bucket="my-bucket", record=[_point1, _point2])
+>
+>         print(f" > successfully: {successfully}")
+>
+>
+> if __name__ == "__main__":
+>     asyncio.run(main())
+> ```
+
+#### Async Query API
+
+The `influxdb_client.client.query_api_async.QueryApiAsync` supports retrieving data as:
+
+- List of `influxdb_client.client.flux_table.FluxTable`
+- Stream of `influxdb_client.client.flux_table.FluxRecord` via `typing.AsyncGenerator`
+- [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)
+- Stream of [Pandas DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html) via `typing.AsyncGenerator`
+- Raw `str` output
+
+> ``` python
+> import asyncio
+>
+> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+>
+>
+> async def main():
+>     async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+>         # Stream of FluxRecords
+>         query_api = client.query_api()
+>         records = await query_api.query_stream('from(bucket:"my-bucket") '
+>                                                '|> range(start: -10m) '
+>                                                '|> filter(fn: (r) => r["_measurement"] == "async_m")')
+>         async for record in records:
+>             print(record)
+>
+>
+> if __name__ == "__main__":
+>     asyncio.run(main())
+> ```
+
+#### Async Delete API
+
+> ``` python
+> import asyncio
+> from datetime import datetime
+>
+> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+>
+>
+> async def main():
+>     async with 
InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+>         start = datetime.fromtimestamp(0)
+>         stop = datetime.now()
+>         # Delete data with location = 'Prague'
+>         successfully = await client.delete_api().delete(start=start, stop=stop, bucket="my-bucket",
+>                                                         predicate="location = \"Prague\"")
+>         print(f" > successfully: {successfully}")
+>
+>
+> if __name__ == "__main__":
+>     asyncio.run(main())
+> ```
+
+#### Management API
+
+> ``` python
+> import asyncio
+>
+> from influxdb_client import OrganizationsService
+> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+>
+>
+> async def main():
+>     async with InfluxDBClientAsync(url='http://localhost:8086', token='my-token', org='my-org') as client:
+>         # Initialize async OrganizationsService
+>         organizations_service = OrganizationsService(api_client=client.api_client)
+>
+>         # Find organization with name 'my-org'
+>         organizations = await organizations_service.get_orgs(org='my-org')
+>         for organization in organizations.orgs:
+>             print(f'name: {organization.name}, id: {organization.id}')
+>
+>
+> if __name__ == "__main__":
+>     asyncio.run(main())
+> ```
+
+#### Proxy and redirects
+
+You can configure the client to tunnel requests through an HTTP proxy.
+The following proxy options are supported:
+
+- `proxy` - Set this to configure the http proxy to be used, e.g. `http://localhost:3128`
+- `proxy_headers` - A dictionary containing headers that will be sent to the proxy. Could be used for proxy authentication.
+
+``` python
+from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+
+
+async with InfluxDBClientAsync(url="http://localhost:8086",
+                               token="my-token",
+                               org="my-org",
+                               proxy="http://localhost:3128") as client:
+    ...
+```
+
+If your proxy notifies the client of a permanent redirect (`HTTP 301`) to a **different host**,
+the client removes the `Authorization` header, because otherwise its contents would be sent to third parties, which is a security vulnerability.
+
+The client automatically follows HTTP redirects. The default redirect policy is to follow up to `10` consecutive requests.
+The redirects can be configured via:
+
+- `allow_redirects` - If set to `False`, do not follow HTTP redirects.
+  `True` by default.
+- `max_redirects` - Maximum number of HTTP redirects to follow. `10`
+  by default.
+
+### Logging
+
+The client uses Python's [logging](https://docs.python.org/3/library/logging.html) facility for logging the library activity. The following logger categories are
+exposed:
+
+- `influxdb_client.client.influxdb_client`
+- `influxdb_client.client.influxdb_client_async`
+- `influxdb_client.client.write_api`
+- `influxdb_client.client.write_api_async`
+- `influxdb_client.client.write.retry`
+- `influxdb_client.client.write.dataframe_serializer`
+- `influxdb_client.client.util.multiprocessing_helper`
+- `influxdb_client.client.http`
+- `influxdb_client.client.exceptions`
+
+The default logging level is `warning`, with no configured logger output.
 You can use the standard logger interface to change the log level and handler:
+
+``` python
+import logging
+import sys
+
+from influxdb_client import InfluxDBClient
+
+with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+    for _, logger in client.conf.loggers.items():
+        logger.setLevel(logging.DEBUG)
+        logger.addHandler(logging.StreamHandler(sys.stdout))
+```
+
+#### Debugging
+
+For debugging purposes you can enable verbose logging of HTTP requests and set the `debug` level on all of the client's logger categories by:
+
+``` python
+client = InfluxDBClient(url="http://localhost:8086", token="my-token", debug=True)
+```
+
+Both HTTP request headers and body will be logged to standard output.
+
+## Local tests
+
+``` console
+# start/restart InfluxDB2 on local machine using docker
+./scripts/influxdb-restart.sh
+
+# install requirements
+pip install -e . --user
+pip install -e .\[extra\] --user
+pip install -e .\[test\] --user
+
+# run unit & integration tests
+pytest tests
+```
+
+## Contributing
+
+Bug reports and pull requests are welcome on GitHub at <https://github.com/influxdata/influxdb-client-python>.
+
+## License
+
+The package is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/RECORD b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/RECORD
new file mode 100644
index 0000000..4e48aa7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/RECORD
@@ -0,0 +1,820 @@
+influxdb_client-1.46.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+influxdb_client-1.46.0.dist-info/LICENSE,sha256=Imzbi0-cDPO6d3Dfd1hkKtilouVkIn1zlom-kl9wR64,1073
+influxdb_client-1.46.0.dist-info/METADATA,sha256=3xUEunAtA94puUZBggfmK-N-F-LKct2rXeYctC4K8yA,64812
+influxdb_client-1.46.0.dist-info/RECORD,,
+influxdb_client-1.46.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+influxdb_client-1.46.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+influxdb_client-1.46.0.dist-info/top_level.txt,sha256=gbZ1PAKLca104ubdmUjA22TCOWlx5N-MB-AXK-66Vgo,16
+influxdb_client/__init__.py,sha256=0Q3JpvbMJjLRCuTyESgeqF2nVda-kE6pHzDALIO4zyw,27295
+influxdb_client/__pycache__/__init__.cpython-312.pyc,,
+influxdb_client/__pycache__/configuration.cpython-312.pyc,,
+influxdb_client/__pycache__/extras.cpython-312.pyc,,
+influxdb_client/__pycache__/rest.cpython-312.pyc,,
+influxdb_client/__pycache__/version.cpython-312.pyc,,
+influxdb_client/_async/__init__.py,sha256=-Kpdx5s1qXeel2bY5vvk7v5d0NcCA_llWJVG-7Zo_1E,30
+influxdb_client/_async/__pycache__/__init__.cpython-312.pyc,,
+influxdb_client/_async/__pycache__/api_client.cpython-312.pyc,,
+influxdb_client/_async/__pycache__/rest.cpython-312.pyc,,
+influxdb_client/_async/api_client.py,sha256=Lr9wIJMnRnr3Zk1pLvc1UVzDBfwnlMQYvhkpLys0JGg,26664
+influxdb_client/_async/rest.py,sha256=UHx7c7mj9K0P6d1b4WrMcygVFRw9hlYMMofMuhjqFvY,12648
+influxdb_client/_sync/__init__.py,sha256=BW2sxb13LZVu0pf2fTGFBl4cBimLVFTENcpGG6GCXOo,29
+influxdb_client/_sync/__pycache__/__init__.cpython-312.pyc,,
+influxdb_client/_sync/__pycache__/api_client.cpython-312.pyc,,
+influxdb_client/_sync/__pycache__/rest.cpython-312.pyc,,
+influxdb_client/_sync/api_client.py,sha256=-izYyoFx-AR0QmfByPezi_6aDdJ7X6xBPOv4Bkp2FbE,26669
+influxdb_client/_sync/rest.py,sha256=8gt-njMep_bdprLAxk9hpWJYq6qMT45czt7s5E83-VA,15258
+influxdb_client/client/__init__.py,sha256=UlVNJ5HGupDxnxSRZPUi9OajdjDK1UOxe-SKg7z6uZg,3264 +influxdb_client/client/__pycache__/__init__.cpython-312.pyc,, +influxdb_client/client/__pycache__/_base.cpython-312.pyc,, +influxdb_client/client/__pycache__/_pages.cpython-312.pyc,, +influxdb_client/client/__pycache__/authorizations_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/bucket_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/delete_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/delete_api_async.cpython-312.pyc,, +influxdb_client/client/__pycache__/exceptions.cpython-312.pyc,, +influxdb_client/client/__pycache__/flux_csv_parser.cpython-312.pyc,, +influxdb_client/client/__pycache__/flux_table.cpython-312.pyc,, +influxdb_client/client/__pycache__/influxdb_client.cpython-312.pyc,, +influxdb_client/client/__pycache__/influxdb_client_async.cpython-312.pyc,, +influxdb_client/client/__pycache__/invokable_scripts_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/labels_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/logging_handler.cpython-312.pyc,, +influxdb_client/client/__pycache__/organizations_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/query_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/query_api_async.cpython-312.pyc,, +influxdb_client/client/__pycache__/tasks_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/users_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/warnings.cpython-312.pyc,, +influxdb_client/client/__pycache__/write_api.cpython-312.pyc,, +influxdb_client/client/__pycache__/write_api_async.cpython-312.pyc,, +influxdb_client/client/_base.py,sha256=mtO7VS_Wrme6GDGJHXMCrrEzvQdSkfXeeXFb6lCUvSU,24092 +influxdb_client/client/_pages.py,sha256=FLhu7mJdDMWpsSxS0uVAitgjQZlzaoTmoNA2hXFL9TE,2069 +influxdb_client/client/authorizations_api.py,sha256=rCbuyZKSYA8LYfnPPq8Gz-kGYHq9mUDIo8L-3NMQvno,4702 +influxdb_client/client/bucket_api.py,sha256=EGYErPDhUWM55EhOyhvkdu-1XidaTzFMyAouyyOGvI4,5022 +influxdb_client/client/delete_api.py,sha256=cRjYEX8lCgZAAYLTnf8VdJX_aamrnrLWXYGfYwCEOW0,1548 +influxdb_client/client/delete_api_async.py,sha256=3Ji1PGWbVRN98N0-XbJO-LYK9Y_EEMD_VHLc1fOBKgI,1782 +influxdb_client/client/exceptions.py,sha256=TM3n70nqJMdoS3xuCJKkHBqZvaYxKi9_AA-QHR4-v40,1668 +influxdb_client/client/flux_csv_parser.py,sha256=5-ZicsUIZ4IZKnd1NUTWY6reTB_7o_2wMnNTxfGSjtA,15490 +influxdb_client/client/flux_table.py,sha256=jbTr49yLqy1-H6levq_1fLCjpxUZ3kn9p9e_wm6YcGU,9207 +influxdb_client/client/influxdb_client.py,sha256=FWKZASfoDAj6avEFSM8rkY6cgWMZZMFRvYbEi8TkUJc,17756 +influxdb_client/client/influxdb_client_async.py,sha256=M8sY_RpwbxiRUJWsIEbZjqlCtTshW2Ok8QeR8firlN0,13369 +influxdb_client/client/invokable_scripts_api.py,sha256=nrYE8CRV9jBdHIo0pg-1YY9ZAin2qHyyJ2SF3hz-Gz0,13768 +influxdb_client/client/labels_api.py,sha256=jAADu7j1HN_W04mhbgE2zjqjBkcyRets-KMdl7eW-qU,3009 +influxdb_client/client/logging_handler.py,sha256=VPYpHAB_Y5zluex6-frRheFhck_cGazj8BSoan3LtUE,2211 +influxdb_client/client/organizations_api.py,sha256=zUymRcfjGvIfCgibeaIIfMAkwizwnQesqUeCkRt3AC4,2386 +influxdb_client/client/query_api.py,sha256=5528RsyqEF4WUWtjBwoHsR7HjTXBgAgu2zQqTeDETz0,15524 +influxdb_client/client/query_api_async.py,sha256=RXboi0gVKBKKk9LqrKDuefLUO1we0PLw4BjhqWQUwbw,12351 +influxdb_client/client/tasks_api.py,sha256=GTWf21bLmLqidQoANqVsU3Yg9OYDQw5-cwJUnwh3yDw,8992 +influxdb_client/client/users_api.py,sha256=En42XQ9j1RfrPxt1-uAEPCwEv6cXlD32V0uLfyPWUMo,2612 
+influxdb_client/client/util/__init__.py,sha256=vSdg6MFNsdihNGfkSMdw6VLvJcV0pxjdVNmYYTZ5eqQ,21 +influxdb_client/client/util/__pycache__/__init__.cpython-312.pyc,, +influxdb_client/client/util/__pycache__/date_utils.cpython-312.pyc,, +influxdb_client/client/util/__pycache__/date_utils_pandas.cpython-312.pyc,, +influxdb_client/client/util/__pycache__/helpers.cpython-312.pyc,, +influxdb_client/client/util/__pycache__/multiprocessing_helper.cpython-312.pyc,, +influxdb_client/client/util/date_utils.py,sha256=kva1sSi-oIpB3ZEqlXBChndp_vjzuU4F27i6ZKM4U7I,3228 +influxdb_client/client/util/date_utils_pandas.py,sha256=8OXJeYAQqmg_gqmyNdL-Ybp9D3_K6j2Q_ckHUS7som0,622 +influxdb_client/client/util/helpers.py,sha256=RSA0Vr93T2dorOvJ8T5qlcLz0xedLV_hCWiOBiDj8J0,1901 +influxdb_client/client/util/multiprocessing_helper.py,sha256=C-Ae0t0ZnQS0zYfvgrZ9F2SUH4gMZ5FxwbQvgPg-pJY,7388 +influxdb_client/client/warnings.py,sha256=VshZ8GQTp1r_zBoHbyMlEj5xt6Unr-oEU550PK7T_5k,1766 +influxdb_client/client/write/__init__.py,sha256=UlVNJ5HGupDxnxSRZPUi9OajdjDK1UOxe-SKg7z6uZg,3264 +influxdb_client/client/write/__pycache__/__init__.cpython-312.pyc,, +influxdb_client/client/write/__pycache__/dataframe_serializer.cpython-312.pyc,, +influxdb_client/client/write/__pycache__/point.cpython-312.pyc,, +influxdb_client/client/write/__pycache__/retry.cpython-312.pyc,, +influxdb_client/client/write/dataframe_serializer.py,sha256=KzBVID-RQ56ljkLkI6c6DMmsIQU3urmbeCm5hgAQ0nI,14185 +influxdb_client/client/write/point.py,sha256=y6a09TiYaaNa4TwXDMN0A4DoUrEEFkgeDtg5s1R8eMw,13439 +influxdb_client/client/write/retry.py,sha256=IE8cqcOOUSqCn_eGaPzD9WfX7D0bNqZ_5Q818GhIwyA,5932 +influxdb_client/client/write_api.py,sha256=CM1qdsJeEJDpO7ztfOULNWpMH-yvnL6ch073b8qbYeM,25167 +influxdb_client/client/write_api_async.py,sha256=ti6-XeqYQwhGWCxUA9TSGFVcI5y2__FhfA0AVIxTA1Y,6911 +influxdb_client/configuration.py,sha256=YZ3fGQcc7O3S92VMNVXlzF5ma-l8d7vclgxmurbT5q0,9501 +influxdb_client/domain/__init__.py,sha256=lVYSED2-JTLEPTpR6aHNtB8CdWwxdHQm4Qbx4zGEUaw,23603 +influxdb_client/domain/__pycache__/__init__.cpython-312.pyc,, +influxdb_client/domain/__pycache__/add_resource_member_request_body.cpython-312.pyc,, +influxdb_client/domain/__pycache__/analyze_query_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/analyze_query_response_errors.cpython-312.pyc,, +influxdb_client/domain/__pycache__/array_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/ast_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/authorization.cpython-312.pyc,, +influxdb_client/domain/__pycache__/authorization_post_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/authorization_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/authorizations.cpython-312.pyc,, +influxdb_client/domain/__pycache__/axes.cpython-312.pyc,, +influxdb_client/domain/__pycache__/axis.cpython-312.pyc,, +influxdb_client/domain/__pycache__/axis_scale.cpython-312.pyc,, +influxdb_client/domain/__pycache__/bad_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/band_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/binary_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/block.cpython-312.pyc,, +influxdb_client/domain/__pycache__/boolean_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/bucket.cpython-312.pyc,, +influxdb_client/domain/__pycache__/bucket_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/bucket_metadata_manifest.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/bucket_retention_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/bucket_shard_mapping.cpython-312.pyc,, +influxdb_client/domain/__pycache__/buckets.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builder_aggregate_function_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builder_config.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builder_config_aggregate_window.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builder_functions_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builder_tags_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/builtin_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/call_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/cell.cpython-312.pyc,, +influxdb_client/domain/__pycache__/cell_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/cell_update.cpython-312.pyc,, +influxdb_client/domain/__pycache__/cell_with_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_base_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_discriminator.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_patch.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_status_level.cpython-312.pyc,, +influxdb_client/domain/__pycache__/check_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/checks.cpython-312.pyc,, +influxdb_client/domain/__pycache__/column_data_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/column_semantic_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/conditional_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/config.cpython-312.pyc,, +influxdb_client/domain/__pycache__/constant_variable_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/create_cell.cpython-312.pyc,, +influxdb_client/domain/__pycache__/create_dashboard_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/custom_check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dashboard.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dashboard_color.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dashboard_query.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dashboard_with_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dashboards.cpython-312.pyc,, +influxdb_client/domain/__pycache__/date_time_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dbr_ps.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dbrp.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dbrp_create.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dbrp_get.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dbrp_update.cpython-312.pyc,, +influxdb_client/domain/__pycache__/deadman_check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/decimal_places.cpython-312.pyc,, +influxdb_client/domain/__pycache__/delete_predicate_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dialect.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dict_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/dict_item.cpython-312.pyc,, +influxdb_client/domain/__pycache__/duration.cpython-312.pyc,, +influxdb_client/domain/__pycache__/duration_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/error.cpython-312.pyc,, +influxdb_client/domain/__pycache__/expression.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/expression_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/field.cpython-312.pyc,, +influxdb_client/domain/__pycache__/file.cpython-312.pyc,, +influxdb_client/domain/__pycache__/float_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/flux_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/flux_suggestion.cpython-312.pyc,, +influxdb_client/domain/__pycache__/flux_suggestions.cpython-312.pyc,, +influxdb_client/domain/__pycache__/function_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/gauge_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/greater_threshold.cpython-312.pyc,, +influxdb_client/domain/__pycache__/health_check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/heatmap_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/histogram_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/http_notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/http_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/http_notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/identifier.cpython-312.pyc,, +influxdb_client/domain/__pycache__/import_declaration.cpython-312.pyc,, +influxdb_client/domain/__pycache__/index_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/integer_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/is_onboarding.cpython-312.pyc,, +influxdb_client/domain/__pycache__/label.cpython-312.pyc,, +influxdb_client/domain/__pycache__/label_create_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/label_mapping.cpython-312.pyc,, +influxdb_client/domain/__pycache__/label_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/label_update.cpython-312.pyc,, +influxdb_client/domain/__pycache__/labels_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/language_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/legacy_authorization_post_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/lesser_threshold.cpython-312.pyc,, +influxdb_client/domain/__pycache__/line_plus_single_stat_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/line_protocol_error.cpython-312.pyc,, +influxdb_client/domain/__pycache__/line_protocol_length_error.cpython-312.pyc,, +influxdb_client/domain/__pycache__/links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/list_stacks_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/log_event.cpython-312.pyc,, +influxdb_client/domain/__pycache__/logical_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/logs.cpython-312.pyc,, +influxdb_client/domain/__pycache__/map_variable_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/markdown_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/measurement_schema.cpython-312.pyc,, +influxdb_client/domain/__pycache__/measurement_schema_column.cpython-312.pyc,, +influxdb_client/domain/__pycache__/measurement_schema_create_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/measurement_schema_list.cpython-312.pyc,, +influxdb_client/domain/__pycache__/measurement_schema_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/member_assignment.cpython-312.pyc,, +influxdb_client/domain/__pycache__/member_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/metadata_backup.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/model_property.cpython-312.pyc,, +influxdb_client/domain/__pycache__/mosaic_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/node.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint_base_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint_discriminator.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoint_update.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_endpoints.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rule_base_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rule_discriminator.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rule_update.cpython-312.pyc,, +influxdb_client/domain/__pycache__/notification_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/object_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/onboarding_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/onboarding_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/option_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/organization.cpython-312.pyc,, +influxdb_client/domain/__pycache__/organization_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/organizations.cpython-312.pyc,, +influxdb_client/domain/__pycache__/package.cpython-312.pyc,, +influxdb_client/domain/__pycache__/package_clause.cpython-312.pyc,, +influxdb_client/domain/__pycache__/pager_duty_notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/pager_duty_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/pager_duty_notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/paren_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/password_reset_body.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_bucket_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_dashboard_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_organization_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_retention_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_stack_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/patch_stack_request_additional_resources.cpython-312.pyc,, +influxdb_client/domain/__pycache__/permission.cpython-312.pyc,, +influxdb_client/domain/__pycache__/permission_resource.cpython-312.pyc,, +influxdb_client/domain/__pycache__/pipe_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/pipe_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_bucket_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_organization_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/post_restore_kv_response.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/post_stack_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/property_key.cpython-312.pyc,, +influxdb_client/domain/__pycache__/query.cpython-312.pyc,, +influxdb_client/domain/__pycache__/query_edit_mode.cpython-312.pyc,, +influxdb_client/domain/__pycache__/query_variable_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/query_variable_properties_values.cpython-312.pyc,, +influxdb_client/domain/__pycache__/range_threshold.cpython-312.pyc,, +influxdb_client/domain/__pycache__/ready.cpython-312.pyc,, +influxdb_client/domain/__pycache__/regexp_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/remote_connection.cpython-312.pyc,, +influxdb_client/domain/__pycache__/remote_connection_creation_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/remote_connection_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/remote_connections.cpython-312.pyc,, +influxdb_client/domain/__pycache__/renamable_field.cpython-312.pyc,, +influxdb_client/domain/__pycache__/replication.cpython-312.pyc,, +influxdb_client/domain/__pycache__/replication_creation_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/replication_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/replications.cpython-312.pyc,, +influxdb_client/domain/__pycache__/resource_member.cpython-312.pyc,, +influxdb_client/domain/__pycache__/resource_members.cpython-312.pyc,, +influxdb_client/domain/__pycache__/resource_members_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/resource_owner.cpython-312.pyc,, +influxdb_client/domain/__pycache__/resource_owners.cpython-312.pyc,, +influxdb_client/domain/__pycache__/restored_bucket_mappings.cpython-312.pyc,, +influxdb_client/domain/__pycache__/retention_policy_manifest.cpython-312.pyc,, +influxdb_client/domain/__pycache__/return_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/routes.cpython-312.pyc,, +influxdb_client/domain/__pycache__/routes_external.cpython-312.pyc,, +influxdb_client/domain/__pycache__/routes_query.cpython-312.pyc,, +influxdb_client/domain/__pycache__/routes_system.cpython-312.pyc,, +influxdb_client/domain/__pycache__/rule_status_level.cpython-312.pyc,, +influxdb_client/domain/__pycache__/run.cpython-312.pyc,, +influxdb_client/domain/__pycache__/run_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/run_manually.cpython-312.pyc,, +influxdb_client/domain/__pycache__/runs.cpython-312.pyc,, +influxdb_client/domain/__pycache__/scatter_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/schema_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/scraper_target_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/scraper_target_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/scraper_target_responses.cpython-312.pyc,, +influxdb_client/domain/__pycache__/script.cpython-312.pyc,, +influxdb_client/domain/__pycache__/script_create_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/script_invocation_params.cpython-312.pyc,, +influxdb_client/domain/__pycache__/script_language.cpython-312.pyc,, +influxdb_client/domain/__pycache__/script_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/scripts.cpython-312.pyc,, +influxdb_client/domain/__pycache__/secret_keys.cpython-312.pyc,, +influxdb_client/domain/__pycache__/secret_keys_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/shard_group_manifest.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/shard_manifest.cpython-312.pyc,, +influxdb_client/domain/__pycache__/shard_owner.cpython-312.pyc,, +influxdb_client/domain/__pycache__/simple_table_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/single_stat_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/slack_notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/slack_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/slack_notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/smtp_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/smtp_notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/source.cpython-312.pyc,, +influxdb_client/domain/__pycache__/source_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/sources.cpython-312.pyc,, +influxdb_client/domain/__pycache__/stack.cpython-312.pyc,, +influxdb_client/domain/__pycache__/stack_associations.cpython-312.pyc,, +influxdb_client/domain/__pycache__/stack_events.cpython-312.pyc,, +influxdb_client/domain/__pycache__/stack_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/stack_resources.cpython-312.pyc,, +influxdb_client/domain/__pycache__/statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/static_legend.cpython-312.pyc,, +influxdb_client/domain/__pycache__/status_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/string_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/subscription_manifest.cpython-312.pyc,, +influxdb_client/domain/__pycache__/table_view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/table_view_properties_table_options.cpython-312.pyc,, +influxdb_client/domain/__pycache__/tag_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/task.cpython-312.pyc,, +influxdb_client/domain/__pycache__/task_create_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/task_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/task_status_type.cpython-312.pyc,, +influxdb_client/domain/__pycache__/task_update_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/tasks.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_plugin.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_plugin_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_plugin_request_plugins.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_plugins.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_request.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegraf_request_metadata.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegrafs.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegram_notification_endpoint.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegram_notification_rule.cpython-312.pyc,, +influxdb_client/domain/__pycache__/telegram_notification_rule_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_apply.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_apply_remotes.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_apply_template.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_chart.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_export_by_id.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_export_by_id_org_ids.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/template_export_by_id_resource_filters.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_export_by_id_resources.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_export_by_name.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_export_by_name_resources.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_kind.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_buckets.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_buckets_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_checks.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_dashboards.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_dashboards_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_label_mappings.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_labels.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_labels_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_notification_endpoints.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_notification_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_notification_rules_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_tasks.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_tasks_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_telegraf_configs.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_variables.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_diff_variables_new_old.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_errors.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_label.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_label_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_buckets.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_dashboards.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_label_mappings.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_notification_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_status_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_tag_rules.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_tasks.cpython-312.pyc,, +influxdb_client/domain/__pycache__/template_summary_summary_variables.cpython-312.pyc,, +influxdb_client/domain/__pycache__/test_statement.cpython-312.pyc,, +influxdb_client/domain/__pycache__/threshold.cpython-312.pyc,, +influxdb_client/domain/__pycache__/threshold_base.cpython-312.pyc,, +influxdb_client/domain/__pycache__/threshold_check.cpython-312.pyc,, +influxdb_client/domain/__pycache__/unary_expression.cpython-312.pyc,, +influxdb_client/domain/__pycache__/unsigned_integer_literal.cpython-312.pyc,, +influxdb_client/domain/__pycache__/user.cpython-312.pyc,, 
+influxdb_client/domain/__pycache__/user_response.cpython-312.pyc,, +influxdb_client/domain/__pycache__/user_response_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/users.cpython-312.pyc,, +influxdb_client/domain/__pycache__/variable.cpython-312.pyc,, +influxdb_client/domain/__pycache__/variable_assignment.cpython-312.pyc,, +influxdb_client/domain/__pycache__/variable_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/variable_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/variables.cpython-312.pyc,, +influxdb_client/domain/__pycache__/view.cpython-312.pyc,, +influxdb_client/domain/__pycache__/view_links.cpython-312.pyc,, +influxdb_client/domain/__pycache__/view_properties.cpython-312.pyc,, +influxdb_client/domain/__pycache__/views.cpython-312.pyc,, +influxdb_client/domain/__pycache__/write_precision.cpython-312.pyc,, +influxdb_client/domain/__pycache__/xy_geom.cpython-312.pyc,, +influxdb_client/domain/__pycache__/xy_view_properties.cpython-312.pyc,, +influxdb_client/domain/add_resource_member_request_body.py,sha256=1H3PyibQPk8wHgjaK92ZqlxWGRip4ynoHhJQnTCkBm0,3899 +influxdb_client/domain/analyze_query_response.py,sha256=rlfUkPRv9YVxBefS-5H7kbZhT6Bg2TX69sblvG0RmxQ,3095 +influxdb_client/domain/analyze_query_response_errors.py,sha256=QvTLaUTphavyXzl2DgBcCnDk9rh9WZdKlOJ6UvuNZXk,5052 +influxdb_client/domain/array_expression.py,sha256=3V3d517kvTvB29oRdiAiLFCL5BJP5-K7BVdAwvaxOqw,3847 +influxdb_client/domain/ast_response.py,sha256=0NID3f_wxeHIlGaJY6KOzLI9rIYuwSx9Kaog47iWg4M,2897 +influxdb_client/domain/authorization.py,sha256=U7WmMRbckYsCeJOX7PDiOQ-ESWP7pbQvHuZ37aeVBAU,11522 +influxdb_client/domain/authorization_post_request.py,sha256=vHa3ngmtBTE9Agk5D9nXvzlpSUuOyvQWtYMQW0O8hy8,6084 +influxdb_client/domain/authorization_update_request.py,sha256=6OPDQLB1nt9Z1mgFSgtChbiYjQyxSx3gxKIzPMkL_pM,4064 +influxdb_client/domain/authorizations.py,sha256=RFxMLAUs788Y3NzXWhebABxQMBT4bvbwBSG0h6gZQEw,3766 +influxdb_client/domain/axes.py,sha256=psGUyVyfxpXjTSI-uPsLRZMcEFygW6OHcaSCf6Azq64,3428 +influxdb_client/domain/axis.py,sha256=5rziSYbaikOW9uD1Nip8hw52Ek3ReHuxtVTmnh6_H0s,6312 +influxdb_client/domain/axis_scale.py,sha256=5LzMN3R6nEbLHCsgVnkYZPh8QplsuI6cdNOp6ghdFhc,2424 +influxdb_client/domain/bad_statement.py,sha256=52AczE-b9E4bzLRga69Ix51BHWgUlvNNUzlflzsNZTE,3679 +influxdb_client/domain/band_view_properties.py,sha256=cGCHFj4aRBTuBet7CJWxBSP6XHIib4KaOA4MPzgXiQg,25134 +influxdb_client/domain/binary_expression.py,sha256=YHdPXouiJCaSsM19j_E29gdchqwxUAtI7T9r3vmDiAI,4967 +influxdb_client/domain/block.py,sha256=DtKEb4C7NZ0L2mE_rzoQZ5R_hd5wXBAFJ6McAUJ_3eE,3608 +influxdb_client/domain/boolean_literal.py,sha256=H27jGzaXbTcxLeowKvjf9uTvx4xZ9MSKn-EX0YpDNCE,3678 +influxdb_client/domain/bucket.py,sha256=jiJwLvGvAX8r6y2rwe9IBA3-GKdbjHJY0XpWigc3Hnw,10502 +influxdb_client/domain/bucket_links.py,sha256=V7o5wpWYht8nKefcxAKti3Z6Pbn1Fn9ova2ZuT8_LUw,6146 +influxdb_client/domain/bucket_metadata_manifest.py,sha256=QeL3DwirCknolFNfzuy54zh9EuJGKSvAR4v_KFz3K8s,8749 +influxdb_client/domain/bucket_retention_rules.py,sha256=qLS38WmCvjaTi313ey_Ytr3h3LHLVkTg55hKCJvrHuo,6101 +influxdb_client/domain/bucket_shard_mapping.py,sha256=jW8K6k2_BWi7qQmt_hUy1XBEL8s-p1JbIVfYyu4K2Mc,3784 +influxdb_client/domain/buckets.py,sha256=F3kkX9awxBn9jLh1MpXtDxZihLEMZCczBrPQdL1DQfk,3528 +influxdb_client/domain/builder_aggregate_function_type.py,sha256=fGqVS-RQE7XTpnbALJB22k0PCqGe4gdff3dSyKgjAjs,2485 +influxdb_client/domain/builder_config.py,sha256=S-FdXKHa7fRAhjCYkA8iCLVJJ5XuCnC_-vZQONuV0Ao,5220 
+influxdb_client/domain/builder_config_aggregate_window.py,sha256=5rBNunOZVlmQim-VODRsc1viGf4RDxCduZicpmcQwmQ,3828 +influxdb_client/domain/builder_functions_type.py,sha256=Kpiz8Irq2ZqmhvFnHUSpNFhyf9xYfGdk1c6JwDDJuTs,2968 +influxdb_client/domain/builder_tags_type.py,sha256=WNXY68kQcPDH2FoV1xp5q2wQtoEhEJMxt2wCzupuNs4,4564 +influxdb_client/domain/builtin_statement.py,sha256=I9st7lQGLt0b7hGTcYfgc3Pdt4lkW3AwfDIlY1Vdh9E,3654 +influxdb_client/domain/call_expression.py,sha256=ZifmJY6552B5Dq311r7P6DVM5GxgruA9lDJ3s2XtndA,4477 +influxdb_client/domain/cell.py,sha256=Q2xIN7RaSKVcYQDqydC_3xIu9DYkQabrd5KcB2IM740,5935 +influxdb_client/domain/cell_links.py,sha256=arjEePyy6kAtLU4NhTDQ1pslVCSuGjucIxz3rwn0Ynk,3456 +influxdb_client/domain/cell_update.py,sha256=Ra1q3X4AynR5mZmdqS2Fm7Mr9yLJQTxJoprNSWioB-Y,4308 +influxdb_client/domain/cell_with_view_properties.py,sha256=UhJPuKLi-LnKXerXMhyOcUQ_Ej7lkiJzs1FTycCADDc,4248 +influxdb_client/domain/check.py,sha256=ipr9Lgl85up6nLiXQM1Z6IXOJTGdhEiWeY_mJLXg8Cg,3411 +influxdb_client/domain/check_base.py,sha256=eEeCqykW0y-Tu2a63qnVsEI5YDh7e306o0lZHbm2n_E,13027 +influxdb_client/domain/check_base_links.py,sha256=btEUy6rM_IbO_VYPfHDywUDPTbwtv7FSDC96BV1V9JU,5629 +influxdb_client/domain/check_discriminator.py,sha256=p3TWK2zhcRcEt3qhW1OCRZFp3c5bbtUvxZ5c7j46Ouw,3949 +influxdb_client/domain/check_patch.py,sha256=QmOKCzoixBeDGamm8VyVtHGNKEKh9GKncZ6Hq76APNk,4178 +influxdb_client/domain/check_status_level.py,sha256=V_kcUBQiKNDMDufnSp82Ir0X8zxAhYWdRaBftOwZKGg,2499 +influxdb_client/domain/check_view_properties.py,sha256=uMlrlJMuZidoXwhPVCsBp9UWMf5FKEAdfg_FOVpVjH0,11262 +influxdb_client/domain/checks.py,sha256=14dZ2t1uA4MMSheUavG7hXfKYHpxydakv4mgfoQEEGo,3494 +influxdb_client/domain/column_data_type.py,sha256=IRc9Pr2ZBF87oiZAwS8X2xIpU4wj3jeTMKLfI-pO23k,2517 +influxdb_client/domain/column_semantic_type.py,sha256=F3ZZvKXl04jTL8oJSQ6GRJr9IaCpIMwL5OZqFGYPqdo,2477 +influxdb_client/domain/conditional_expression.py,sha256=PmCDqCi3xdylKHLxTfwnq38Z3K2lgRn2_r0NUScGPIo,5203 +influxdb_client/domain/config.py,sha256=MAvrUvwNp4PGtV-b5Ia35LnJrLQXQ2VgegN9EQSnmns,2952 +influxdb_client/domain/constant_variable_properties.py,sha256=xye1Fmlom8XgTldOHc4QpCO4cmxhiIXwklvTycLWGHo,3826 +influxdb_client/domain/create_cell.py,sha256=YAou9VzXPurFZrkydvIfiXCMTNiH91Rs1pcb4cVhsYA,5615 +influxdb_client/domain/create_dashboard_request.py,sha256=11jGjtENKVo0dxzG8P8DQ3j8cBNR9Qyx1fqRgkJyJKk,4853 +influxdb_client/domain/custom_check.py,sha256=0gPJDferHCEJEQr255cV4NyVNRDS89zt4ieHjOCDJ-A,4607 +influxdb_client/domain/dashboard.py,sha256=lUzmNN4nmnX5Q3UcLqzeZYz4bNYZaUfddI-dcdqZM_0,5599 +influxdb_client/domain/dashboard_color.py,sha256=qHV_4NdJRlBg0LXKEt4UvYQUWIu-kngGELK5yynK8BE,6312 +influxdb_client/domain/dashboard_query.py,sha256=HYo-owjxyzJlIr_5Nt1qcf7fvTcOUWNTkAbPFNLhJMo,5056 +influxdb_client/domain/dashboard_with_view_properties.py,sha256=fHzIrJax-UXjF9hvQB3S-EJ1TSd-wU5q3lxdEwUjgCE,6067 +influxdb_client/domain/dashboards.py,sha256=JShTmOi8WZWyHSYyun3IzRgjN5r0O5BYY_Kd8Tu3ZAs,3630 +influxdb_client/domain/date_time_literal.py,sha256=yyWOPK5yWTa-pwtxKOQPPGljgiG-JFkYSYLbEIbq8dg,3701 +influxdb_client/domain/dbr_ps.py,sha256=P88b4CYeU7jmpQxPr7FmHat5iW8H-NpuBoxeJH33qZM,2944 +influxdb_client/domain/dbrp.py,sha256=dIplyR8bIYGwJK1vg0myldxuYzxzd-VzI3rc0mn0ISE,9316 +influxdb_client/domain/dbrp_create.py,sha256=jMoS9jqpXjkbqIwvK2ZHksDFZZLbN7Sbn_n6zEzNLO8,7989 +influxdb_client/domain/dbrp_get.py,sha256=z5RaRyv_5_jcIqHLaCj-Kjolf3rEAh-GlKNda2FT1W4,2940 
+influxdb_client/domain/dbrp_update.py,sha256=Dt4Y1qwisYgARylGRCzGakPRcd-vFtS9I3zUkh9a1d0,4477 +influxdb_client/domain/deadman_check.py,sha256=YrAPsrQHT4fFgKoBqYNtvu3EyGsROnJCKDEvMqo490Q,10859 +influxdb_client/domain/decimal_places.py,sha256=EjOm537DOdOwy81it_FJJf0iMfI1mCtY0K1TJfkVMC0,3909 +influxdb_client/domain/delete_predicate_request.py,sha256=Ii-NYlepo4-efTC8YovCFh10D_fy9agv99WcIzalzko,5429 +influxdb_client/domain/dialect.py,sha256=Yjae0nJtc4_r9RIp8NBeRLq1N5P8kuuOvXuExwNSZTk,9277 +influxdb_client/domain/dict_expression.py,sha256=FC0m948qYO--fEJL8GtJFe6ir65hlTfEqnoro9N40Q0,3840 +influxdb_client/domain/dict_item.py,sha256=5M7FYeQxWjbjl4mveS0yl-EfDKwn8itzBVpS207epyc,4022 +influxdb_client/domain/duration.py,sha256=B5Tp1lh2CBy2fUv7myK05vDIv55htVOgA7grUbxvHQM,4120 +influxdb_client/domain/duration_literal.py,sha256=vD5qHTIHWOx1AfGavHTG8Ooz2FASQMIN9ogXo-Lg-Tw,3789 +influxdb_client/domain/error.py,sha256=uzHTEC7MnPMQB-tONfUrvKR9BxgYjLNG3WiommsvX44,5078 +influxdb_client/domain/expression.py,sha256=jUb-B1IWpEjmEmUZp-TVgfOLx4YQ9jNpDddZbXqv_lo,2435 +influxdb_client/domain/expression_statement.py,sha256=gi6zNByQ3k1rDN7vT5kGG4FQNF4XkriiQI1aiy-4oGQ,3847 +influxdb_client/domain/field.py,sha256=lekR7iC9GeCMyG2moZgrRXxt1eXAFZzR9YkkN_29A8o,5183 +influxdb_client/domain/file.py,sha256=AjZHUK8f8lNOTckRSWocSWNun8LXUD-larGC749LwgU,5456 +influxdb_client/domain/float_literal.py,sha256=uzRku-k6V_7YdJpCs18eY_4cZrLSjbbeh3PqoWC01KM,3659 +influxdb_client/domain/flux_response.py,sha256=qeWVEkcUJStj9qkjbVMHGbYynMps8xJewyft-atcZus,2912 +influxdb_client/domain/flux_suggestion.py,sha256=DT3JLgilXl6Z4pK7SRmLH6xEBIbWkyG4Vd-lRZo3y0A,3565 +influxdb_client/domain/flux_suggestions.py,sha256=e0R0rHdwMYiU9S6gXwfgqHK0xNz8DstzRhWf6Pl4lCo,3004 +influxdb_client/domain/function_expression.py,sha256=x2Y78R5sw-gidTK3TK-phiBuINF7cTp020X3eKAfWQI,4430 +influxdb_client/domain/gauge_view_properties.py,sha256=FIlKx0Fg7ndJVE--KSVEmJgyQHuGQJkZQEEn9dwrsOs,11269 +influxdb_client/domain/greater_threshold.py,sha256=-DtVAuRDo5xo5FARRnEG6MLP2tloPBvzBSdrnWqc9Co,4029 +influxdb_client/domain/health_check.py,sha256=2bpkcB9gtNwouqdb5RklH4dmz3Renv96sKkZATxHT10,6125 +influxdb_client/domain/heatmap_view_properties.py,sha256=ugrwotyOz7lrJVjy7dj8ZoykYz2etBLyXpb8Q88PLYA,27474 +influxdb_client/domain/histogram_view_properties.py,sha256=sw26g2IIGr23LuvOllZC0R2MXQd2NV9Dta--G264r9c,15810 +influxdb_client/domain/http_notification_endpoint.py,sha256=6K7vY9RQY1PzB4hwB71sO8BAcNfxKXtWi7r4O2EcpG0,9417 +influxdb_client/domain/http_notification_rule.py,sha256=mjo0zNUuxFSysqh9gm6v8Y_ig7bJB164C_pmtfcIyF0,4990 +influxdb_client/domain/http_notification_rule_base.py,sha256=_lU9m8bYZfYQ569szf5a94gcxJcjpEufcQxajTwYwX0,6164 +influxdb_client/domain/identifier.py,sha256=hvUt3ljZ059efnwwHrJZnB6lexodvh_IIwOxa2AL2ho,3616 +influxdb_client/domain/import_declaration.py,sha256=UpiG_HZqP5yLLFU6odo7l-Q0kW2TB4-L-7vUQciAA1Q,4185 +influxdb_client/domain/index_expression.py,sha256=rupUpt96hccDyseug5sKuI9uUH8LTxsPSjMiKFa3fKI,4318 +influxdb_client/domain/integer_literal.py,sha256=qA9fAkABbWG2OVEpW_pngm7X-YXe-wF6eEd_jqK7gmE,3675 +influxdb_client/domain/is_onboarding.py,sha256=vHv0LrbQhD4cJsGnnp4KEFkQOL_4OQ8rC2Ds-z_K6U8,3149 +influxdb_client/domain/label.py,sha256=ZnOJY3iiwAXDW1ZmqpLLxG88kBniZS4EWU1-HBfWBc8,4865 +influxdb_client/domain/label_create_request.py,sha256=dsUWXnolpNwyI1MvIYrVe5iN4kaayTSi2xaETAE5WEg,4740 +influxdb_client/domain/label_mapping.py,sha256=ZgJqee2SA09RCdEFCvoVXHUn8KHQvwBbwUK07xBsaU0,3182 
+influxdb_client/domain/label_response.py,sha256=GecgCudOrjQbjThsg7R87vDP3YXNOQ4C3qbRcjH1l18,3533 +influxdb_client/domain/label_update.py,sha256=a40b0yA2BddjZYwecopC5Lpg1LIyEctbXxnNr_cGYCc,3612 +influxdb_client/domain/labels_response.py,sha256=GD9TganKOHyalEzZeUM8IR9b-AyZQIrikBnguop3Vew,3582 +influxdb_client/domain/language_request.py,sha256=bYRIviEkWs8llr2RrEB6HCcnczO1oD1fncy-IFbHxu4,3131 +influxdb_client/domain/legacy_authorization_post_request.py,sha256=kpNNl0P0i9s1b9fHNewmekT4cWRgaQvHRtVSE2PotLA,6066 +influxdb_client/domain/lesser_threshold.py,sha256=1kv07TEuDOtfYoqQG-qNiJarp0nORB5NyD11Lp0ErHc,4017 +influxdb_client/domain/line_plus_single_stat_properties.py,sha256=Juh4VOSo0l3xmkhoeTNf-SGMeDXnbvDVbJYiSijgI6g,27278 +influxdb_client/domain/line_protocol_error.py,sha256=2mrb96ojJKl2hUDvE5LdcAZ5_VrpudSKStO8OBCsMjQ,6024 +influxdb_client/domain/line_protocol_length_error.py,sha256=uf_FkcsiAyZAI5TSu3eHYRC_HzpqOSxPP2FxzIuTRsI,3986 +influxdb_client/domain/links.py,sha256=CAvQwKy9kpJCyRpbIYznWxuawVyyPcDaJlB9nL73-6c,4182 +influxdb_client/domain/list_stacks_response.py,sha256=0UaqF5pBOwYldU9rY-k5SwTol8R07OKysr8LsJ51S0E,3018 +influxdb_client/domain/log_event.py,sha256=0o9sYaPNpmI3g7OycLAz92Nu_rjt02Wv5sR6z4TDoms,4626 +influxdb_client/domain/logical_expression.py,sha256=ViPejSdxkwh6X2WUYKJAroYatZzx7nzl4YzRtVTKeqk,4986 +influxdb_client/domain/logs.py,sha256=Ke2pXcqCOtQ0fbpuQcqEVqRhzgUqUmyHYAfd6qQWJ7s,2929 +influxdb_client/domain/map_variable_properties.py,sha256=6lciDOG1-iKwre__LgaQgAH66V9rKDnX4EA0DSqcPgU,3786 +influxdb_client/domain/markdown_view_properties.py,sha256=_wFSOmNCcPVwO6yBd7AkAegXEjebyGnDtgWcbExrx9w,4576 +influxdb_client/domain/measurement_schema.py,sha256=KouiL05_N0AKALKlFiuDknQ92guH6AJ1DUBVNgBYOQs,7679 +influxdb_client/domain/measurement_schema_column.py,sha256=Triuaz8QhXQtv11O6XhsL3lerOegGBlV4I5lIHaOQos,4536 +influxdb_client/domain/measurement_schema_create_request.py,sha256=J4UA7PnZdVUumi5tZ47vEyPk0D22pXaUQKbMq4nSnVs,4295 +influxdb_client/domain/measurement_schema_list.py,sha256=3OZUuuA38ZsqV8kBya1JKw0YwKlJjsiE0IVGJveBxKg,3432 +influxdb_client/domain/measurement_schema_update_request.py,sha256=1hf5ilxxWlcqm1gz3BgXQMOulvcZ_ioF0hHHmQeauY0,3368 +influxdb_client/domain/member_assignment.py,sha256=uqlBThB0m5Sm15fVa5rnEoB7f6_qVD7OlJBcT8R19TM,4347 +influxdb_client/domain/member_expression.py,sha256=kBoJ9GKCWTpxhraZCViRLZBFF1US5n9i5YhsNr714P4,4435 +influxdb_client/domain/metadata_backup.py,sha256=C9InVyu_ngHYoCckCOOc8WaB4qXZM-m5rdbSioWQRy4,4388 +influxdb_client/domain/model_property.py,sha256=eT9n0acvUhh22f9TY1UC1sark0kvDuUSK4wAYZu8kc0,4140 +influxdb_client/domain/mosaic_view_properties.py,sha256=xo_pt8plRKUHhqRrhglFs3G4P2lRltktb9xhw4J1T7I,26178 +influxdb_client/domain/node.py,sha256=N_a7NoqqOBrN65rPMmf0WBcj6lCxX-FpuI1VCZyxiMY,2330 +influxdb_client/domain/notification_endpoint.py,sha256=cnwWLChUH2hUaBFzJL8HuXl4jOxsIXtbYHR1Pod3na8,3668 +influxdb_client/domain/notification_endpoint_base.py,sha256=P_MwGHWYhGeGZxIZrq1TpzzJjWdb5JS2iyVSjB4QBmA,10157 +influxdb_client/domain/notification_endpoint_base_links.py,sha256=ydUyOQMc1oxxQx5XDcHwFCyu5sH68LQtuwtifzzSBas,5276 +influxdb_client/domain/notification_endpoint_discriminator.py,sha256=fE2NMcN83Rp1De0XmBjINOfKfbSHDvn3tcdwoZrQ2no,3570 +influxdb_client/domain/notification_endpoint_type.py,sha256=aB4oWLR-ICULHN55RkfupiccgijWaz6QnC1ZCgCUNJs,2523 +influxdb_client/domain/notification_endpoint_update.py,sha256=cy71XU2eJO3E_pjv-u_K9UH-oMhhPfxMd_QbzN1AMdY,4418 
+influxdb_client/domain/notification_endpoints.py,sha256=oqFOHKD9Xm3dphqNjL228W-C_E-20lgrmYbwnTQmP_g,4023 +influxdb_client/domain/notification_rule.py,sha256=PZj98YEZWHWioY7FSZPjKMEsaKgoZ34cf7RgosU8yiE,3769 +influxdb_client/domain/notification_rule_base.py,sha256=lv5rbda2MGkGni-t5Zmb44s3FzWBlgBf3maUg4U-Zsc,20763 +influxdb_client/domain/notification_rule_base_links.py,sha256=zjBdZVKpCH_UHjiSZjWwOOdWLEgMj1dDWu74z-uRfQA,5882 +influxdb_client/domain/notification_rule_discriminator.py,sha256=iT5N6wX73PQQREbwRUHfuv6u45w8nImnuvDuhJ1SRWA,4866 +influxdb_client/domain/notification_rule_update.py,sha256=J80I2PDgltwcYxiYQUyYcw2wrmQMU3wyyNIsqNvXpCY,4358 +influxdb_client/domain/notification_rules.py,sha256=vy0MUKDaJwrda0THyk8gGMxNS3GgxOK4XMlP4pymL1M,3887 +influxdb_client/domain/object_expression.py,sha256=37V1gctBXnKBo5gXrDpaaK7hEvJEyYhbnJJc5PoNRac,3899 +influxdb_client/domain/onboarding_request.py,sha256=3EilRUqKKlHrDCxSi72Efupu8LJIRu_3PzqkJrr4mgs,8139 +influxdb_client/domain/onboarding_response.py,sha256=1TmsRPIic7LTcU2Tj1mdFfdha4k361FuDeG2cXOPmgU,4813 +influxdb_client/domain/option_statement.py,sha256=HVxwbx2wkxrm8vJdWHiJzj4W0MlnYEF3UAD1EaxrNgg,3791 +influxdb_client/domain/organization.py,sha256=d89nwXnccXPoUBDvyCZ69LhAU6WNF-iTqsgShvhggiY,7919 +influxdb_client/domain/organization_links.py,sha256=ME2b59I4YGqtigTBKKmlIZq-2w02Te-L8qovfwkvzZw,7828 +influxdb_client/domain/organizations.py,sha256=_FaeaYByj6LkuQhEWfKLXuRLJ3xZb1hIj50RF83A_5k,3552 +influxdb_client/domain/package.py,sha256=oR1AazlEc3DrucZUIElE0mdWz4gsg4QIXEaiLHGvvSs,4792 +influxdb_client/domain/package_clause.py,sha256=E2Ki51XqiFk66paFxie2c6xF3FZ4a7sF6r2eoKAZDgM,3554 +influxdb_client/domain/pager_duty_notification_endpoint.py,sha256=xNfK5bzyCdlg1ySvraH-EulFmlfXxDzAbKuzylx_8S4,5203 +influxdb_client/domain/pager_duty_notification_rule.py,sha256=7Ban_nKmNJO8Og8YuQaZCXlz_WCcgPZPqfNtKx1ZPeg,5108 +influxdb_client/domain/pager_duty_notification_rule_base.py,sha256=E-14I60kz68gZh9xUBfA0tiq2CfVtm6DAoWOUJaP4hM,6573 +influxdb_client/domain/paren_expression.py,sha256=dS2XFtwnQIh9zCw0rvmQo9bhewecgNaYtcY_l4I1mJs,3807 +influxdb_client/domain/password_reset_body.py,sha256=nkRsecDsz6QCuRwKyDYU8Y_cakhzR7-amsauN99eUYM,3114 +influxdb_client/domain/patch_bucket_request.py,sha256=kKDBlSb__oSxyz8hOgjtqM5lZ__m4WZvTR4nRwhW3SA,4840 +influxdb_client/domain/patch_dashboard_request.py,sha256=n_B5FKjDkcnA-lEGQjb7IBFgo_Jh6OD53thos8jwYcI,4614 +influxdb_client/domain/patch_organization_request.py,sha256=Dc-xyM067aPVJ98ei8Dwew8siKfZiz-sFz133be4nU0,3912 +influxdb_client/domain/patch_retention_rule.py,sha256=kzpTmn0M588h9gA17mwNcBGXpSa1Hh6CkhQ-0MJXzLk,6375 +influxdb_client/domain/patch_stack_request.py,sha256=Lq4bITGYmjQRXJaeLTDOhkjc3IPJanLy5s4FDY2Oa9s,5357 +influxdb_client/domain/patch_stack_request_additional_resources.py,sha256=Q9_wq1Y4LIPHQMJP7w0EbxaH8AJ7Ze5UoSvDD-peibI,4978 +influxdb_client/domain/permission.py,sha256=gQc3bMLC7rKe8oRPJBNVqRGcBSykYb-tZff7JXSDXfk,3785 +influxdb_client/domain/permission_resource.py,sha256=SzDRz2cHFkvLYipGT2GfkXjXdlnDTDoX7GpwKfnjFWE,6090 +influxdb_client/domain/pipe_expression.py,sha256=9Xslhfrt8NNLdxlSffr-5a-_FkV585wrfoPED-95hnQ,4355 +influxdb_client/domain/pipe_literal.py,sha256=8Vyqinh8qUChlzdAXYALGTy3wFoZly7tIBOajgkeGaU,3068 +influxdb_client/domain/post_bucket_request.py,sha256=OM-t-ZMWGMi_YHBIHXK4msAMLJNMm4WtUEwSpGlt6BM,8809 +influxdb_client/domain/post_check.py,sha256=29_iqC9PYjJWmX_P5I89OqQFw4DL_hEb_USC1lTDB68,3439 +influxdb_client/domain/post_notification_endpoint.py,sha256=Bl-stktmAoCK0V9o2t77bWZUqlNrg0uMD_rLSFCUfis,3696 
+influxdb_client/domain/post_notification_rule.py,sha256=iomtlCI95Tf4vL5hWghQqGvGzaX8yb9hBHDqTOuCItk,3797 +influxdb_client/domain/post_organization_request.py,sha256=g26jGHMXdYpCju5Kaecla2h5XqrJQ7-L1eOuCmiCCYk,3984 +influxdb_client/domain/post_restore_kv_response.py,sha256=dLdWozdE3H_XA9hB8yLjD6l78hF7ygGNLMNKRgGsWAY,3205 +influxdb_client/domain/post_stack_request.py,sha256=9Mdl2oF9hK5lGVMZ-TVzrgRUSlmVE2RDbiL4UgPZECE,4859 +influxdb_client/domain/property_key.py,sha256=4SXSrOe8FiXZ7G_xc9s6yUROKslraXFAVws84v-V2wI,2462 +influxdb_client/domain/query.py,sha256=BVmX3LdNbe3pcyfA3KHvMMHCj0_-Ih4TAqNKnQUZQ6E,7407 +influxdb_client/domain/query_edit_mode.py,sha256=snVox-WG635m6TaJaEGG1RFg5a9zp0kltqdRMMa8tfk,2448 +influxdb_client/domain/query_variable_properties.py,sha256=3Np453UwLonZeA_HEZQDDzdzJCDjg0em6mBMyBmReRw,3853 +influxdb_client/domain/query_variable_properties_values.py,sha256=JjSQ33qPDuLj7uqb82oiqAK3t9EHu01vi-FTekY77zE,3757 +influxdb_client/domain/range_threshold.py,sha256=QcuCg-mgpr7O_godQGCZrVMLRInrTQeutXvOoK3jXTQ,5291 +influxdb_client/domain/ready.py,sha256=V8eD5FVjVBwKJlVvtqSrBkMqBh6w7adsZCVg0oa8Dfc,3998 +influxdb_client/domain/regexp_literal.py,sha256=gdHHcFNNRgNR2sUj89sFwBDkuvi9epfv-I4xBJX2mUY,3664 +influxdb_client/domain/remote_connection.py,sha256=4bEjakZSClb5amIWUOQM-CVCynMYuQM21kqcSWRs86M,7537 +influxdb_client/domain/remote_connection_creation_request.py,sha256=lEWjeTmythaqeAr7nLtWD4b2_cV0ajyanCbbXfNybPQ,8294 +influxdb_client/domain/remote_connection_update_request.py,sha256=ZOQmp5fhEERRfvWLJKKy1ZyMsrFo7_df3xjERf-os94,7118 +influxdb_client/domain/remote_connections.py,sha256=TmrG-L7i1lYo4ncL8AxF6tRq-tEFkAshFJBbJaBqZSk,3064 +influxdb_client/domain/renamable_field.py,sha256=QZoHAg_aih9YZgggICRzLb8nPSfmQouqWT-_LPPGL_Y,4797 +influxdb_client/domain/replication.py,sha256=BmkODXdts7fQPQSfxRYjIxy84HM-qVBZnMh7lOBJq50,13815 +influxdb_client/domain/replication_creation_request.py,sha256=GuzlDb2Xcip7KndLUGyTe75VENVxsPSn4vjn_8p59s0,11454 +influxdb_client/domain/replication_update_request.py,sha256=ZhxEWTnxq0i8wxMZR0Y2kLSzgaYDTpuYSXqCCPhxgpM,9349 +influxdb_client/domain/replications.py,sha256=Uyevtv5lorCZCPv_xT0dOL0xSpBjrIhogsx92B1EZGM,3114 +influxdb_client/domain/resource_member.py,sha256=cFwI23lyzTPFE_oEPKolzLJGEUUNsDXat8yvnOiv69M,3350 +influxdb_client/domain/resource_members.py,sha256=kJXhapcB6Aku24y8qss-sI3Djvbc2eYnhCzSe5RvXjU,3645 +influxdb_client/domain/resource_members_links.py,sha256=4GdSwFKu9JUhmWuRU_kb73lXvfdiacAAVE1yYdlSwTA,2987 +influxdb_client/domain/resource_owner.py,sha256=SJFouY95jnVWH95U4KJAwZ3vhvncRaY-QVDu7uPi24M,3342 +influxdb_client/domain/resource_owners.py,sha256=ggh2Kk9XIYTE7H7HTMKgVUb1VDuiUT83Z_IpK48avU8,3631 +influxdb_client/domain/restored_bucket_mappings.py,sha256=QjrUrAOmnpmM3DcLbIlVu7GHu1f_AxeAHRGmSW1UP58,4735 +influxdb_client/domain/retention_policy_manifest.py,sha256=wnep4v5rpvhaO-25Ruh5BKkul8ZMveyanXuqX4_QwJU,7507 +influxdb_client/domain/return_statement.py,sha256=KbBRKq4wTWUylX8Ye4Y1_jh0fIyMIOfKVnH8lmkJWtM,3763 +influxdb_client/domain/routes.py,sha256=t_rdW1her-O9bVsPNkHtdUxArkIek1WoV75Cgccqi1M,12972 +influxdb_client/domain/routes_external.py,sha256=4NRqJGfUpPQlOW9gp7bbepr76wMEgS5aagGT4a41yfA,3065 +influxdb_client/domain/routes_query.py,sha256=mPrsL4WhcXwhxHa_wM84Lk4VYZ2YXYbGDAbcQFy_qAE,4766 +influxdb_client/domain/routes_system.py,sha256=_iHXoq5qhbUCy0EswHZuj5AJIkR_SvMsnwz80qiAVf4,4148 +influxdb_client/domain/rule_status_level.py,sha256=dldrQBEEuaitpu0_apyZGdZS31MmqAQZktZywB0iimg,2512 
+influxdb_client/domain/run.py,sha256=uW9VHVkW32LLJiJBE8DmYj1w1Nejbr_g2u20Zx7c7Wg,9592 +influxdb_client/domain/run_links.py,sha256=r0gsSsoShTNFz2XFyNHKVtRTijexBLkhzX0Bl1Syf7k,4007 +influxdb_client/domain/run_manually.py,sha256=jfvh4ZjHHvc2KqlFpsGycfY5WuOKroAk1TxEHBYpWTA,3447 +influxdb_client/domain/runs.py,sha256=yshplGEWUXLHWbwwB9hZUHI6yH2_yC64P5fz68r29D0,3426 +influxdb_client/domain/scatter_view_properties.py,sha256=DXjinYwaMgMIBoeqGCGkgwNWlsruxhIblxDaFB0kDkI,28474 +influxdb_client/domain/schema_type.py,sha256=WQNi6OqEAYEd3Jpb41wrfcgPQViR-nh4dQS50llODGM,2441 +influxdb_client/domain/scraper_target_request.py,sha256=zGXlNWwqyNczZ3C-Z20bezQp3iqITewFW8chofvwDH8,6733 +influxdb_client/domain/scraper_target_response.py,sha256=CDUFE-_Q3qVrsLioRXJOE4YAHiCJVCuVmrZxbcg_148,5558 +influxdb_client/domain/scraper_target_responses.py,sha256=q-dnmenK5C9WueuGi0YxE_Spu5wus4D4xjCuoCacmwE,3254 +influxdb_client/domain/script.py,sha256=X2gq8TxU9z8d3l6UTTK1RSxEZGQrR6o-fOYi1gDI988,8083 +influxdb_client/domain/script_create_request.py,sha256=bwN3IFv2nI4fmlumEFJNfYF-MFjSbPjFU5laVUBtd4Y,5679 +influxdb_client/domain/script_invocation_params.py,sha256=81xRZ0iqsmwkMsXBVDjiYvB0xo_N_5h-f9IUW0_W2PY,3514 +influxdb_client/domain/script_language.py,sha256=hOwsH8YXog-n-Rt5dDn2MpZyUfAlJeAGW5FoPsUzDPA,2461 +influxdb_client/domain/script_update_request.py,sha256=bCgaUCHi0L8KAFLGlLmOjTzJvPWlg1-Mxtnu7f08bK0,3867 +influxdb_client/domain/scripts.py,sha256=3Kula60mK6bu8p-9VAhvH4m_HiVOZ-HpmI8qDgsKVvM,2964 +influxdb_client/domain/secret_keys.py,sha256=JhS4f9Qeow3kT6tu4ckm_ad_Lg23roiG7gDZgBB2K5E,2976 +influxdb_client/domain/secret_keys_response.py,sha256=mOXxSHZB3C6Ll9LkyySp_CXjRpi7F_DrI5P65ZFSRZ0,3188 +influxdb_client/domain/shard_group_manifest.py,sha256=YiMDH2oei85-DfT6yt7TBGll7R7oHby7rOjNKKfwes0,6790 +influxdb_client/domain/shard_manifest.py,sha256=SSLri01WABebI9wuyP5I7DygDwCwTLYOp3IKwwUmdls,3811 +influxdb_client/domain/shard_owner.py,sha256=Sfbfb3AtD2o5U5Gwos5Tsc7Am-JUlSV6zfCGkGH_ZRo,3141 +influxdb_client/domain/simple_table_view_properties.py,sha256=YcUIRr4nZEhiCmLGJxDQqVxpCbH0yLQie2Y7cPpzYUo,7330 +influxdb_client/domain/single_stat_view_properties.py,sha256=3DzYfN4h-UqXxAIVuFgLAnYflwIAmqZVRsEuk576IfM,12316 +influxdb_client/domain/slack_notification_endpoint.py,sha256=JaOoJkzPCRCIqvtahStx9xqZgg-hzopIpmQiAOBFjzY,5121 +influxdb_client/domain/slack_notification_rule.py,sha256=46TT2FwUNZSEqpqSlvhyiNpexuwMSziD7xUHMnlHI40,5162 +influxdb_client/domain/slack_notification_rule_base.py,sha256=V9ipaDnMDeOK-m6W4Aa-k2-JaKYAtGnZ9o6IVfArX24,7199 +influxdb_client/domain/smtp_notification_rule.py,sha256=tB-WmSRhf7T4Nn73L1FqW2UnoRaW7x-bJtbgLrMcwEU,5246 +influxdb_client/domain/smtp_notification_rule_base.py,sha256=lKsy04meciSdV5jjnJKP9EXwZlqYbapVqsySJh-hvYA,7950 +influxdb_client/domain/source.py,sha256=x9w25MbFbX2anX7uLBB_tOFeisy96TC4cxK3UNGQGAQ,12461 +influxdb_client/domain/source_links.py,sha256=BaAWKG8vJcujUBPUfBbiPj-fA2S6PMSMjo-0XfOyHlw,4706 +influxdb_client/domain/sources.py,sha256=1cuXAx2F4s-0VyHMLzAetPfLfTouSKy75zdK9s_3g-8,3573 +influxdb_client/domain/stack.py,sha256=LeDBjJm-8oNcCEF0bixLuhKrp8DTxMUVDXh2xio-ovg,4668 +influxdb_client/domain/stack_associations.py,sha256=w373phYwTjUxrxPntSNQpm9XV7GgSzfMBR5E6AEauM0,3651 +influxdb_client/domain/stack_events.py,sha256=O74OhDMjrdrMLA7uMSJDzKEfvFiyjAra1AVe70iivmk,6869 +influxdb_client/domain/stack_links.py,sha256=QIwLe-VeQly6WAt88tgN4DJwbI4v52MnagrSJEWn1Yg,2917 +influxdb_client/domain/stack_resources.py,sha256=vg3NH9D5AAFoZUdQBxsA0e5TEpbSrmlg8W6v7WLbGdc,6600 
+influxdb_client/domain/statement.py,sha256=FQTyIV6rzKhY0pZ0oHfpmwQVpw3godMY3eXV08nS-08,2345 +influxdb_client/domain/static_legend.py,sha256=5CM8yEGSuWrUJRsm6fEoe9dtBiZuq95D42aV5qrZhOE,7279 +influxdb_client/domain/status_rule.py,sha256=aYUdcmnnHJNHmXQ3Iz1aon6iHzh3MfH1iVZuqNfAUzA,5058 +influxdb_client/domain/string_literal.py,sha256=-7iqjHGKcdUUQvdHJCHvy3_Mw2bGOpAyFwTLoZBSxX8,3669 +influxdb_client/domain/subscription_manifest.py,sha256=tCKYpps8Q6wwiY6ygEM4e4SvOxVU6V9vWx0RvnCSs7s,4583 +influxdb_client/domain/table_view_properties.py,sha256=O79KzEjFE7qdrEZrlxXg6gRHMGjsleL2ECP55qr4jJ8,11275 +influxdb_client/domain/table_view_properties_table_options.py,sha256=oxVniaem_9OldegfH_TGtP7Juofoch2Cx0zO_G2no7o,6235 +influxdb_client/domain/tag_rule.py,sha256=dgxrq1QsOXsyJHKTqZn-WN_jvbKXeWnt1Sph6Pvfaug,4033 +influxdb_client/domain/task.py,sha256=xvu800Rx_0Kj10E9AjxEdiNftm-Wo4AL8R61JHoB5rM,17656 +influxdb_client/domain/task_create_request.py,sha256=0ATMCCOpFbZiVLvkQyFqGQITv9O-24yq-YWUDCHkWcc,5992 +influxdb_client/domain/task_links.py,sha256=JYIghl_WHdzSOT1_g8ci_Q_cMiLOatBD8HZT1eFZKbs,6092 +influxdb_client/domain/task_status_type.py,sha256=vkSx9Re22yM___Xq_16TlL7IC9qzy9eRMPQJBaSfoB8,2449 +influxdb_client/domain/task_update_request.py,sha256=InFzaezPxnDsNZeu2jBwCWgUTBTDewLYS-Hhd8WWa8U,7306 +influxdb_client/domain/tasks.py,sha256=Ldp5JfJXf3ecTCoCPP4b4OaFLy3p56gupH1lIKvtQPY,3460 +influxdb_client/domain/telegraf.py,sha256=wW-3JxwiF5eM5FGNG51OleSJuAk0xcvNGWsQ8MkDkMY,4607 +influxdb_client/domain/telegraf_plugin.py,sha256=UtPhsOIZBL3MSqGcqlaBywPpBeNZMdguBzxy4bIGjWA,4846 +influxdb_client/domain/telegraf_plugin_request.py,sha256=odq6JQp9HB7DK1r_QoxKLIefFJq-8hkh7zESh6KiDn0,6457 +influxdb_client/domain/telegraf_plugin_request_plugins.py,sha256=6RCASuDfxm2AbBvJPxF2LJbY9sl9_705TVh-wSUJRqk,5754 +influxdb_client/domain/telegraf_plugins.py,sha256=uOMFsLG9p31X9POgaHi7arzpz7ciUoIhod_tg_dknh0,4204 +influxdb_client/domain/telegraf_request.py,sha256=MEOUdmvHMEk1xGC2HmLkLozYfRda621NdxUnoKsSKOc,5572 +influxdb_client/domain/telegraf_request_metadata.py,sha256=LaAARuem3pjAikalB8qDr14QmaYHdDrTt226UN1O5ts,3067 +influxdb_client/domain/telegrafs.py,sha256=aH-s1UJK5XJS8NmRzu6BNDsO2YHsyIUT-Q_pbDUy7bc,3124 +influxdb_client/domain/telegram_notification_endpoint.py,sha256=ZQe1ZeDV881pjGvQG5sOhCxMlFb8f-a3H81bPJqci2g,5497 +influxdb_client/domain/telegram_notification_rule.py,sha256=v-P_3hn4c-SanFwRTtcG0BVM9h9x5mjyG8m3udSAkgY,5390 +influxdb_client/domain/telegram_notification_rule_base.py,sha256=LKYL9Ea12gIXiLpw4FbxNH451PkCqOSmnasthskQqx0,9053 +influxdb_client/domain/template_apply.py,sha256=at4UM8JcWecWZ2P73tnBdgje4sJbr8GclLwsm1sV_qI,16253 +influxdb_client/domain/template_apply_remotes.py,sha256=Ul3_fRM_2uU27NJPwk4k_4iv-YOz7pHwePXUsYvcvCg,3779 +influxdb_client/domain/template_apply_template.py,sha256=swtx9XTzDRUaYKYZJ3aVlLQDcgn9Cj2vHBABS8Topm0,4507 +influxdb_client/domain/template_chart.py,sha256=vEJnwJW9mLEFMQtcl7OIBZ_KQ6_6ttD9cv2gldI5YXI,5418 +influxdb_client/domain/template_export_by_id.py,sha256=WLWS3FILgx9JSSbXZ40MTmWH4Ojttqow5vJMImnkqTE,4527 +influxdb_client/domain/template_export_by_id_org_ids.py,sha256=83IPVIR8Jph8Ova0NAeA4Ye976pr5L8-QJTzlc_OW48,3970 +influxdb_client/domain/template_export_by_id_resource_filters.py,sha256=5xI-1ctR3-vLYqIynOAg7ZGg6UPD-hukQ0ZGgBSS3mw,4081 +influxdb_client/domain/template_export_by_id_resources.py,sha256=o2OZh8nQFL39kKjvbnsCpz5TuyJ5fBVDme1Gs3PswVQ,4690 +influxdb_client/domain/template_export_by_name.py,sha256=EasQ_aSgNRDuLbB76GlvN7pn33-NrrjJFnLC8w0NxQQ,4563 
+influxdb_client/domain/template_export_by_name_resources.py,sha256=L4Yw-PC9BIQnH36dyg6QWJ9vP9Vla52U8pzDI1YRA9c,3850 +influxdb_client/domain/template_kind.py,sha256=ti5qVO5MUUA1_ezPLl8BiLFmb4hV_cpbHOliCf5cFHw,2905 +influxdb_client/domain/template_summary.py,sha256=gQrfBil2fZs5Cq6tuVRUntMRZrDbQQFIOhqxhU2e2tM,5647 +influxdb_client/domain/template_summary_diff.py,sha256=7EOqsRceYgAIXRc0G30IsSNyM88LSbZj7Xo5BAId5AY,10678 +influxdb_client/domain/template_summary_diff_buckets.py,sha256=w3vQ7TM1h5IkPCooNLrA3yMTxRQCk18xBjr5OqZ6wTs,6638 +influxdb_client/domain/template_summary_diff_buckets_new_old.py,sha256=zZRbGsbAqt3AVUUr9EK6NAdWt27pZaf9i2QotjIvPI8,5412 +influxdb_client/domain/template_summary_diff_checks.py,sha256=K0i8G3rcpaFhjDPv-Xb5p2bdgMO69p7ktITIv2oJVE4,6527 +influxdb_client/domain/template_summary_diff_dashboards.py,sha256=slZDndI5ume39oCe2oYuF9a7weZ0AKxyx_4vdFI2vgU,6737 +influxdb_client/domain/template_summary_diff_dashboards_new_old.py,sha256=aI7o8P_CErrAj0luNTYUrauxPTWt_F-WedFAEaNr42o,4601 +influxdb_client/domain/template_summary_diff_label_mappings.py,sha256=4yClquKK6QAL0fm720YpU6ImFZD047adpcE1NChsOgo,9107 +influxdb_client/domain/template_summary_diff_labels.py,sha256=v4O1u-ZA9Ew6eJFReR4FFXkOIvs45O9aZI75cA2Okko,6605 +influxdb_client/domain/template_summary_diff_labels_new_old.py,sha256=Lg3ssiUs7rb6Lnu1fgxToIBKap8T8dDqVHTqXR0PReE,4473 +influxdb_client/domain/template_summary_diff_notification_endpoints.py,sha256=fTOeVa_7R84rByuoSSL9ZSuctTVoo9pICR9gbfJRGhw,7022 +influxdb_client/domain/template_summary_diff_notification_rules.py,sha256=ZchiAbm5lkTX9SXJcu6Np_E0KX8kGhKmWT9TOdec5og,6968 +influxdb_client/domain/template_summary_diff_notification_rules_new_old.py,sha256=74XUeqLQoOR_T1FrSVJOoFOyq5cjf-YAR5SSQTGqpMI,11343 +influxdb_client/domain/template_summary_diff_tasks.py,sha256=euF5v5xZo4jYHfewn1wXB48klbLXJkwP5DuX-bOtKRk,6572 +influxdb_client/domain/template_summary_diff_tasks_new_old.py,sha256=qXwmhlWBnl4XCLvKJBZzWOWLMUZiycziirWIVobcOuI,7078 +influxdb_client/domain/template_summary_diff_telegraf_configs.py,sha256=b_yOtkybJun_Ta4mvjQ3V-ZEt51yLjI8KOIH5e0udkE,6752 +influxdb_client/domain/template_summary_diff_variables.py,sha256=0g7OEglHiXmF8IFrlilLK0yKuF7S5VmRQwGErG3NU6Y,6704 +influxdb_client/domain/template_summary_diff_variables_new_old.py,sha256=PRbGMH9blArE5zmxaXVBatSUC2IIj5eQmBx47XdWmEo,4543 +influxdb_client/domain/template_summary_errors.py,sha256=J-9m1tebmeSHVHyEM-w5EROMmqWWQMSsNIxYkDQbHU8,4960 +influxdb_client/domain/template_summary_label.py,sha256=Q7vGE4yC34_KszTjfdFWEy15e39SEyh7a0PKkNvtIXI,7239 +influxdb_client/domain/template_summary_label_properties.py,sha256=IDvh7RDKA3h7BDV4k83YX9IIyyqSDBFPTxI-dp0ZPBk,3828 +influxdb_client/domain/template_summary_summary.py,sha256=I81Ap6cde2v25SQ-zAadTjAf6oAO46oiu2ve3uDsyqY,12436 +influxdb_client/domain/template_summary_summary_buckets.py,sha256=ksytOEdh040MpGiyav8v3DuyzcOTMJx6-OOSIoY2Lhc,9296 +influxdb_client/domain/template_summary_summary_dashboards.py,sha256=p1MI0hLMwHPRJ9-EtywdrqTqs2Td365iUYnIqxZdnnI,9262 +influxdb_client/domain/template_summary_summary_label_mappings.py,sha256=0xSRYZZB47cUWppU0sr9l3J-00qYovPr20T-pzHRQWk,9212 +influxdb_client/domain/template_summary_summary_notification_rules.py,sha256=HDaDeuTx3auNBZjPeGwvKnNj1tz69JwJaAa9T07IXPY,15023 +influxdb_client/domain/template_summary_summary_status_rules.py,sha256=se5QzTmULObj3f2ROBZAaOAWMH7z8r2L5e-pQJzWVsI,4079 +influxdb_client/domain/template_summary_summary_tag_rules.py,sha256=2ZAZ4UQ0WweIjV9EeFy3D5DSGGXmyiupLzrTdgmoF58,4378 
+influxdb_client/domain/template_summary_summary_tasks.py,sha256=OHblPHp_wmNqnBuv4txMIHVSuvTydWnz20IAG130fm4,9948 +influxdb_client/domain/template_summary_summary_variables.py,sha256=WdXYoSIhATdY_KiLBx_dcU4kLcH0OrXqIarniTaran0,9280 +influxdb_client/domain/test_statement.py,sha256=GYLK-0xr6pVTxWoco0JgOiZc7_JYjxRSgmDM5I6Vlx0,3805 +influxdb_client/domain/threshold.py,sha256=Zy4ATzkJHDvKWYEz5HpzAp1X8jpzuVbl3x6j8GKtUBI,3445 +influxdb_client/domain/threshold_base.py,sha256=ka_MQ7fLn7ywNaXsoqZLAASrucugcHoqsM9CdVboxgM,3780 +influxdb_client/domain/threshold_check.py,sha256=RslKMLwc3AWLninBKVcWw3ftnk-2j-BLjAClb1lNFIo,8524 +influxdb_client/domain/unary_expression.py,sha256=tDNfB2yscwT9ckKmGjCJsXEYKm6RI_rWynUAMvcBfyE,4417 +influxdb_client/domain/unsigned_integer_literal.py,sha256=ieJTUK0g4lNS9DO5XZ69Uf0Og8nhN7pUk-84On21fsw,3763 +influxdb_client/domain/user.py,sha256=arBAmvaaEOz37Q6ouuaU0kXvpo4O5DGdO-IudWjW6Mw,4221 +influxdb_client/domain/user_response.py,sha256=aY7mY0jApJ1S8aK6Xon5auXrl1gNJVnuLEYuSNbKug0,4985 +influxdb_client/domain/user_response_links.py,sha256=UybNCHxTV0vmmFHfcbFLQv0b4rkIz7TgFJGQZaDqZ5Q,2966 +influxdb_client/domain/users.py,sha256=GHoP_pPyOwABIuX0urDA5WzIEvF-HLaT-RN19EliHJs,3529 +influxdb_client/domain/variable.py,sha256=pBoLb5k3sxkUJT9fbo2i2NovHnn1-6B3ATEyQvGiEzs,8782 +influxdb_client/domain/variable_assignment.py,sha256=9kW1zeMmAifrQ_ejxmFOPJiQ2bga8rcgtCvOs4HBa0E,4279 +influxdb_client/domain/variable_links.py,sha256=FNg5ipqNYpq2aiYCnd-AW6oyCXfJEG2nN_lMOrRtHe8,4082 +influxdb_client/domain/variable_properties.py,sha256=w9-SDAOgFVRv4KZV81EIsz4UEQDFXEjLkTOaok85xtk,2372 +influxdb_client/domain/variables.py,sha256=lVpcscybFwUv_V6uS04EkH2Sd2Qxyqy3rOB978NSht8,3024 +influxdb_client/domain/view.py,sha256=doFN9ejH4FU9ggxKxJKDC72qShbvZzNd7wOBXHj7hxU,4757 +influxdb_client/domain/view_links.py,sha256=w6KLpMLkZCGuiKUUbK8LxbjYF7hzLz9R01F7RicWm4w,2910 +influxdb_client/domain/view_properties.py,sha256=sZYXLl5_-Ap3zD5vBwJXOXUUbIXT7AIJC8xC4ZV8uts,2360 +influxdb_client/domain/views.py,sha256=ynv_KQ3KO2BkRkKcYdZBywkWtqV72ca5Eph1m6iPdO8,3472 +influxdb_client/domain/write_precision.py,sha256=8r-s8ytutYO2valmV8SArUstQExkoiLZ9YC1C7oeLXY,2455 +influxdb_client/domain/xy_geom.py,sha256=aMDTPSgcc-lCdoQFzfrirvf6JVaFqRIXTXGZ_yvLkt8,2539 +influxdb_client/domain/xy_view_properties.py,sha256=_fRY0J5AUNiZlNGambJk0MRN5U-gPgRk7Tawu--NrUM,25266 +influxdb_client/extras.py,sha256=hLLdBCv9P0AJiNdhKcNMXwxd4RSopv0BFLp0MKEPADk,396 +influxdb_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +influxdb_client/rest.py,sha256=BLfjaNqoKSkgvzCEcnpw1MHTjPlzCleY-fNElMVZCVI,3025 +influxdb_client/service/__init__.py,sha256=UlVNJ5HGupDxnxSRZPUi9OajdjDK1UOxe-SKg7z6uZg,3264 +influxdb_client/service/__pycache__/__init__.cpython-312.pyc,, +influxdb_client/service/__pycache__/_base_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/authorizations_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/backup_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/bucket_schemas_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/buckets_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/cells_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/checks_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/config_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/dashboards_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/dbr_ps_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/delete_service.cpython-312.pyc,, 
+influxdb_client/service/__pycache__/health_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/invokable_scripts_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/labels_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/legacy_authorizations_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/metrics_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/notification_endpoints_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/notification_rules_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/organizations_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/ping_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/query_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/ready_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/remote_connections_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/replications_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/resources_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/restore_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/routes_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/rules_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/scraper_targets_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/secrets_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/setup_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/signin_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/signout_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/sources_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/tasks_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/telegraf_plugins_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/telegrafs_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/templates_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/users_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/variables_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/views_service.cpython-312.pyc,, +influxdb_client/service/__pycache__/write_service.cpython-312.pyc,, +influxdb_client/service/_base_service.py,sha256=tAqGl2uiMwDAS57xIkVOdVTt4xPsSdAQkJ2TMax8rVQ,2517 +influxdb_client/service/authorizations_service.py,sha256=gfS5S21IRLAPJcJoSZJeWB1lPnyah4temUg3OMwoaZw,41968 +influxdb_client/service/backup_service.py,sha256=D_oGKAmF-XoXAzFEDyDhJXCi3UZ3u5EOr2jq5eBy8u4,17672 +influxdb_client/service/bucket_schemas_service.py,sha256=2AliG6UliuzxufVp0sRb5tkPwgurTGD2bbNjr06ey9o,38263 +influxdb_client/service/buckets_service.py,sha256=QsbKHjWnmanULKyQt2qHXGzod0uXLxviHAD8UYvBmlk,119228 +influxdb_client/service/cells_service.py,sha256=2Q1ecNbwFcoDgLJYfNzsyQ3GwlEJl9HSi9JPUzdKRcI,40089 +influxdb_client/service/checks_service.py,sha256=8UqBZ6-Sr8wECKzgWpPLLNdL1-LTnhVJRBnFKXYP7DU,56692 +influxdb_client/service/config_service.py,sha256=-sxc1AB3UyLRzhmlrJ_sKlU2O5t-KmNrqlqnUiwPxVg,13864 +influxdb_client/service/dashboards_service.py,sha256=jiRaF0vQnM1s4x2Kc_NzrJBgiY05klMBaV2WJiU2Icg,127511 +influxdb_client/service/dbr_ps_service.py,sha256=XSx1C6rMWL371i2-aokQ9TpQ2XvgNJEMRTV0n3SO3L4,37654 +influxdb_client/service/delete_service.py,sha256=E2GtSpvyv6R1XdZFJ8ajyDMVm7F1ICbT77axlCQ29So,16854 +influxdb_client/service/health_service.py,sha256=fXI61gPDkUE2zz_UNjgypn_sjUN5ycxPlPooxQD06w8,5236 
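Each RECORD row here follows the wheel convention `path,sha256=<digest>,<size>`: the digest is an unpadded urlsafe-base64 SHA-256 of the installed file, and compiled `__pycache__` entries carry neither hash nor size. A minimal sketch of checking one row against disk; the helper name is hypothetical, and the plain `rsplit` parsing is a simplification (RECORD is CSV and may quote paths containing commas):

import base64
import hashlib
from pathlib import Path

def verify_record_row(site_packages: Path, row: str) -> bool:
    # e.g. "influxdb_client/version.py,sha256=UXnUwt3sVUAJlwfWMdeLAfA2_XU2sYrL6_0vPdcK-xA,83"
    path, hash_field, size = row.rsplit(",", 2)
    target = site_packages / path
    if not hash_field:
        # __pycache__ rows ("...pyc,,") record no hash or size; existence is all we can check.
        return target.exists()
    data = target.read_bytes()
    algo, _, expected = hash_field.partition("=")
    digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest())
    return digest.rstrip(b"=").decode() == expected and int(size) == len(data)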
+influxdb_client/service/invokable_scripts_service.py,sha256=S-XFE1pZbR6_TE1dq16NhtwIqcNxUqeZXiFF_C-NUyc,53080 +influxdb_client/service/labels_service.py,sha256=iKi_5Vq7HH10XN4oejNKj13rNvFOyV75VUw2favoHj0,26670 +influxdb_client/service/legacy_authorizations_service.py,sha256=qAzqvrE7SlHGO6jD8DOkz7rpK2kpIxLrQT6IsEzzSVc,37745 +influxdb_client/service/metrics_service.py,sha256=qBGo4aQstMfbtvzjmE0chVN67z5Iq5-oXllM7VebS_I,6923 +influxdb_client/service/notification_endpoints_service.py,sha256=bSIfGG0JsExfSOOBuLRzrpeucS1rzP3WVsiiLfpx-DA,56655 +influxdb_client/service/notification_rules_service.py,sha256=8hSqOns9c36E7TsdUnYw81biWOVManZ_9L7DuP72kPk,56064 +influxdb_client/service/organizations_service.py,sha256=tw8vMzoXqytL4ZBgZYqoT-gaOuDkceY4_HfI3FdS2ZM,86048 +influxdb_client/service/ping_service.py,sha256=_pTyCOsOvYtCL4unVO2MRmNWqpJ8AdFmP7OAm612ebU,10843 +influxdb_client/service/query_service.py,sha256=n2EXx-OEizFIt_VPjVOcOBxhIYw_TLDtowvEUElbPRc,42250 +influxdb_client/service/ready_service.py,sha256=fcfZXUAyDz2Yajo1IGg-Pk_QgLyGVu9aG2lEYPZNwuc,5083 +influxdb_client/service/remote_connections_service.py,sha256=q-HLxnedggoTzp1aLELZ6v6pWx-8rLua3Y_HUOUlZ9U,29037 +influxdb_client/service/replications_service.py,sha256=36iwPl_mt9hqt0DlUS7nk4bVw2tTs9LrQTkYjWNySxk,35374 +influxdb_client/service/resources_service.py,sha256=8vZlZBMy3dveXNC92oeGWk1wsm3OUH-tqa3LBGdbiRU,5120 +influxdb_client/service/restore_service.py,sha256=swO0vA2ZhziQ1dYmHgrTlqTNz9k6bOvAul1MgjL03HI,33023 +influxdb_client/service/routes_service.py,sha256=I_cC-Ui5hA9imJr7ua8Daigy0d8_opzFVmpEDYET-QI,5846 +influxdb_client/service/rules_service.py,sha256=3wUwOqELb4nXQ7RQ04wksdmsgLsKSoAlxlaZjbtqiuU,6111 +influxdb_client/service/scraper_targets_service.py,sha256=YzCF1LIuGPKs4RiEzka8bGQlHMhNxIEGWFeJGQsbShU,85021 +influxdb_client/service/secrets_service.py,sha256=F8ZyChlKe70k0P0b9kRRSGHSXohkeV0IPGm3uj2uhfM,24096 +influxdb_client/service/setup_service.py,sha256=Ph1Hz4w-KQARwqTiILDmrh7X0n5qwUnXyX6xj3EuN0I,11093 +influxdb_client/service/signin_service.py,sha256=x0lmCwWonn34TLCJKf7Skc-NfPlqW19iiHzSQxsNUS8,8757 +influxdb_client/service/signout_service.py,sha256=xrzzW0iFcWMpX45jSqhTJU4lsMC_IRRY450U1IZVyWw,9088 +influxdb_client/service/sources_service.py,sha256=9qOsizcK9yNW0vS8FVfvs8cBLufEoluQ50vfJmCDL9A,37076 +influxdb_client/service/tasks_service.py,sha256=uKX2uJxli3i5eKzp5VUffH_5Xpo6anxaqh5xYSOlugQ,146850 +influxdb_client/service/telegraf_plugins_service.py,sha256=nWdm7SUg7F71-Ct3534amAKwKU1W-NKHQKs_afV9yRw,5544 +influxdb_client/service/telegrafs_service.py,sha256=si3Nm4euxANzTxwUyfuXFJrrV1lYxg8-I3C5DSTklkU,82635 +influxdb_client/service/templates_service.py,sha256=bgqiGsH4LoNorHLcDC9-2YX7gikqFRP5YvnL_892KDE,52331 +influxdb_client/service/users_service.py,sha256=mwKXQbiRN_NluRvSh_tTM5TjNElsu71E3Cp4U9PWgkg,71346 +influxdb_client/service/variables_service.py,sha256=FxPjm_sAGIIpouOCmlyD-mS-7F_8FfWaF1hKuscAd0E,50835 +influxdb_client/service/views_service.py,sha256=smGhs-hrNN_3ZR03mZrFbmp42UN9Tysy4na529_NYiQ,13755 +influxdb_client/service/write_service.py,sha256=u6KzC3Xn1mOIqa0xWCxnmxhf0_ye1-d3qRpJOIxvHxM,20537 +influxdb_client/version.py,sha256=UXnUwt3sVUAJlwfWMdeLAfA2_XU2sYrL6_0vPdcK-xA,83 diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/REQUESTED b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/WHEEL 
b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/top_level.txt new file mode 100644 index 0000000..59dfec2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client-1.46.0.dist-info/top_level.txt @@ -0,0 +1 @@ +influxdb_client diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/__init__.py new file mode 100644 index 0000000..a100951 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/__init__.py @@ -0,0 +1,396 @@ +# coding: utf-8 + +# flake8: noqa + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import apis into sdk package +from influxdb_client.service.authorizations_service import AuthorizationsService +from influxdb_client.service.backup_service import BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService +from influxdb_client.service.cells_service import CellsService +from influxdb_client.service.checks_service import ChecksService +from influxdb_client.service.config_service import ConfigService +from influxdb_client.service.dbr_ps_service import DBRPsService +from influxdb_client.service.dashboards_service import DashboardsService +from influxdb_client.service.delete_service import DeleteService +from influxdb_client.service.health_service import HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService +from influxdb_client.service.ping_service import PingService +from influxdb_client.service.query_service import QueryService +from influxdb_client.service.ready_service import ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService +from influxdb_client.service.resources_service import ResourcesService +from influxdb_client.service.restore_service import RestoreService +from influxdb_client.service.routes_service import RoutesService +from influxdb_client.service.rules_service import RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService +from influxdb_client.service.setup_service import SetupService +from 
influxdb_client.service.signin_service import SigninService +from influxdb_client.service.signout_service import SignoutService +from influxdb_client.service.sources_service import SourcesService +from influxdb_client.service.tasks_service import TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService +from influxdb_client.service.templates_service import TemplatesService +from influxdb_client.service.users_service import UsersService +from influxdb_client.service.variables_service import VariablesService +from influxdb_client.service.views_service import ViewsService +from influxdb_client.service.write_service import WriteService + +from influxdb_client.configuration import Configuration +# import models into sdk package +from influxdb_client.domain.ast_response import ASTResponse +from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody +from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse +from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors +from influxdb_client.domain.array_expression import ArrayExpression +from influxdb_client.domain.authorization import Authorization +from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest +from influxdb_client.domain.authorizations import Authorizations +from influxdb_client.domain.axes import Axes +from influxdb_client.domain.axis import Axis +from influxdb_client.domain.axis_scale import AxisScale +from influxdb_client.domain.bad_statement import BadStatement +from influxdb_client.domain.band_view_properties import BandViewProperties +from influxdb_client.domain.binary_expression import BinaryExpression +from influxdb_client.domain.block import Block +from influxdb_client.domain.boolean_literal import BooleanLiteral +from influxdb_client.domain.bucket import Bucket +from influxdb_client.domain.bucket_links import BucketLinks +from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest +from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules +from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping +from influxdb_client.domain.buckets import Buckets +from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType +from influxdb_client.domain.builder_config import BuilderConfig +from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow +from influxdb_client.domain.builder_functions_type import BuilderFunctionsType +from influxdb_client.domain.builder_tags_type import BuilderTagsType +from influxdb_client.domain.builtin_statement import BuiltinStatement +from influxdb_client.domain.call_expression import CallExpression +from influxdb_client.domain.cell import Cell +from influxdb_client.domain.cell_links import CellLinks +from influxdb_client.domain.cell_update import CellUpdate +from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties +from influxdb_client.domain.check import Check +from influxdb_client.domain.check_base import CheckBase +from influxdb_client.domain.check_base_links import CheckBaseLinks +from influxdb_client.domain.check_discriminator import CheckDiscriminator +from influxdb_client.domain.check_patch import CheckPatch +from 
influxdb_client.domain.check_status_level import CheckStatusLevel +from influxdb_client.domain.check_view_properties import CheckViewProperties +from influxdb_client.domain.checks import Checks +from influxdb_client.domain.column_data_type import ColumnDataType +from influxdb_client.domain.column_semantic_type import ColumnSemanticType +from influxdb_client.domain.conditional_expression import ConditionalExpression +from influxdb_client.domain.config import Config +from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties +from influxdb_client.domain.create_cell import CreateCell +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest +from influxdb_client.domain.custom_check import CustomCheck +from influxdb_client.domain.dbrp import DBRP +from influxdb_client.domain.dbrp_create import DBRPCreate +from influxdb_client.domain.dbrp_get import DBRPGet +from influxdb_client.domain.dbrp_update import DBRPUpdate +from influxdb_client.domain.dbr_ps import DBRPs +from influxdb_client.domain.dashboard import Dashboard +from influxdb_client.domain.dashboard_color import DashboardColor +from influxdb_client.domain.dashboard_query import DashboardQuery +from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties +from influxdb_client.domain.dashboards import Dashboards +from influxdb_client.domain.date_time_literal import DateTimeLiteral +from influxdb_client.domain.deadman_check import DeadmanCheck +from influxdb_client.domain.decimal_places import DecimalPlaces +from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest +from influxdb_client.domain.dialect import Dialect +from influxdb_client.domain.dict_expression import DictExpression +from influxdb_client.domain.dict_item import DictItem +from influxdb_client.domain.duration import Duration +from influxdb_client.domain.duration_literal import DurationLiteral +from influxdb_client.domain.error import Error +from influxdb_client.domain.expression import Expression +from influxdb_client.domain.expression_statement import ExpressionStatement +from influxdb_client.domain.field import Field +from influxdb_client.domain.file import File +from influxdb_client.domain.float_literal import FloatLiteral +from influxdb_client.domain.flux_response import FluxResponse +from influxdb_client.domain.flux_suggestion import FluxSuggestion +from influxdb_client.domain.flux_suggestions import FluxSuggestions +from influxdb_client.domain.function_expression import FunctionExpression +from influxdb_client.domain.gauge_view_properties import GaugeViewProperties +from influxdb_client.domain.greater_threshold import GreaterThreshold +from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint +from influxdb_client.domain.http_notification_rule import HTTPNotificationRule +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase +from influxdb_client.domain.health_check import HealthCheck +from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties +from influxdb_client.domain.histogram_view_properties import HistogramViewProperties +from influxdb_client.domain.identifier import Identifier +from influxdb_client.domain.import_declaration import ImportDeclaration +from influxdb_client.domain.index_expression import IndexExpression +from influxdb_client.domain.integer_literal import IntegerLiteral +from influxdb_client.domain.is_onboarding import IsOnboarding +from 
influxdb_client.domain.label import Label +from influxdb_client.domain.label_create_request import LabelCreateRequest +from influxdb_client.domain.label_mapping import LabelMapping +from influxdb_client.domain.label_response import LabelResponse +from influxdb_client.domain.label_update import LabelUpdate +from influxdb_client.domain.labels_response import LabelsResponse +from influxdb_client.domain.language_request import LanguageRequest +from influxdb_client.domain.legacy_authorization_post_request import LegacyAuthorizationPostRequest +from influxdb_client.domain.lesser_threshold import LesserThreshold +from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties +from influxdb_client.domain.line_protocol_error import LineProtocolError +from influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError +from influxdb_client.domain.links import Links +from influxdb_client.domain.list_stacks_response import ListStacksResponse +from influxdb_client.domain.log_event import LogEvent +from influxdb_client.domain.logical_expression import LogicalExpression +from influxdb_client.domain.logs import Logs +from influxdb_client.domain.map_variable_properties import MapVariableProperties +from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties +from influxdb_client.domain.measurement_schema import MeasurementSchema +from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn +from influxdb_client.domain.measurement_schema_create_request import MeasurementSchemaCreateRequest +from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList +from influxdb_client.domain.measurement_schema_update_request import MeasurementSchemaUpdateRequest +from influxdb_client.domain.member_assignment import MemberAssignment +from influxdb_client.domain.member_expression import MemberExpression +from influxdb_client.domain.metadata_backup import MetadataBackup +from influxdb_client.domain.model_property import ModelProperty +from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties +from influxdb_client.domain.node import Node +from influxdb_client.domain.notification_endpoint import NotificationEndpoint +from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase +from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator +from influxdb_client.domain.notification_endpoint_type import NotificationEndpointType +from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate +from influxdb_client.domain.notification_endpoints import NotificationEndpoints +from influxdb_client.domain.notification_rule import NotificationRule +from influxdb_client.domain.notification_rule_base import NotificationRuleBase +from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator +from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate +from influxdb_client.domain.notification_rules import NotificationRules +from influxdb_client.domain.object_expression import ObjectExpression +from influxdb_client.domain.onboarding_request import OnboardingRequest +from influxdb_client.domain.onboarding_response import OnboardingResponse +from 
influxdb_client.domain.option_statement import OptionStatement +from influxdb_client.domain.organization import Organization +from influxdb_client.domain.organization_links import OrganizationLinks +from influxdb_client.domain.organizations import Organizations +from influxdb_client.domain.package import Package +from influxdb_client.domain.package_clause import PackageClause +from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint +from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule +from influxdb_client.domain.pager_duty_notification_rule_base import PagerDutyNotificationRuleBase +from influxdb_client.domain.paren_expression import ParenExpression +from influxdb_client.domain.password_reset_body import PasswordResetBody +from influxdb_client.domain.patch_bucket_request import PatchBucketRequest +from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest +from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest +from influxdb_client.domain.patch_retention_rule import PatchRetentionRule +from influxdb_client.domain.patch_stack_request import PatchStackRequest +from influxdb_client.domain.patch_stack_request_additional_resources import PatchStackRequestAdditionalResources +from influxdb_client.domain.permission import Permission +from influxdb_client.domain.permission_resource import PermissionResource +from influxdb_client.domain.pipe_expression import PipeExpression +from influxdb_client.domain.pipe_literal import PipeLiteral +from influxdb_client.domain.post_bucket_request import PostBucketRequest +from influxdb_client.domain.post_check import PostCheck +from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint +from influxdb_client.domain.post_notification_rule import PostNotificationRule +from influxdb_client.domain.post_organization_request import PostOrganizationRequest +from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse +from influxdb_client.domain.post_stack_request import PostStackRequest +from influxdb_client.domain.property_key import PropertyKey +from influxdb_client.domain.query import Query +from influxdb_client.domain.query_edit_mode import QueryEditMode +from influxdb_client.domain.query_variable_properties import QueryVariableProperties +from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues +from influxdb_client.domain.range_threshold import RangeThreshold +from influxdb_client.domain.ready import Ready +from influxdb_client.domain.regexp_literal import RegexpLiteral +from influxdb_client.domain.remote_connection import RemoteConnection +from influxdb_client.domain.remote_connection_creation_request import RemoteConnectionCreationRequest +from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest +from influxdb_client.domain.remote_connections import RemoteConnections +from influxdb_client.domain.renamable_field import RenamableField +from influxdb_client.domain.replication import Replication +from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest +from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest +from influxdb_client.domain.replications import Replications +from influxdb_client.domain.resource_member import ResourceMember +from influxdb_client.domain.resource_members import ResourceMembers +from 
influxdb_client.domain.resource_members_links import ResourceMembersLinks +from influxdb_client.domain.resource_owner import ResourceOwner +from influxdb_client.domain.resource_owners import ResourceOwners +from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings +from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest +from influxdb_client.domain.return_statement import ReturnStatement +from influxdb_client.domain.routes import Routes +from influxdb_client.domain.routes_external import RoutesExternal +from influxdb_client.domain.routes_query import RoutesQuery +from influxdb_client.domain.routes_system import RoutesSystem +from influxdb_client.domain.rule_status_level import RuleStatusLevel +from influxdb_client.domain.run import Run +from influxdb_client.domain.run_links import RunLinks +from influxdb_client.domain.run_manually import RunManually +from influxdb_client.domain.runs import Runs +from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule +from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase +from influxdb_client.domain.scatter_view_properties import ScatterViewProperties +from influxdb_client.domain.schema_type import SchemaType +from influxdb_client.domain.scraper_target_request import ScraperTargetRequest +from influxdb_client.domain.scraper_target_response import ScraperTargetResponse +from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses +from influxdb_client.domain.script import Script +from influxdb_client.domain.script_create_request import ScriptCreateRequest +from influxdb_client.domain.script_invocation_params import ScriptInvocationParams +from influxdb_client.domain.script_language import ScriptLanguage +from influxdb_client.domain.script_update_request import ScriptUpdateRequest +from influxdb_client.domain.scripts import Scripts +from influxdb_client.domain.secret_keys import SecretKeys +from influxdb_client.domain.secret_keys_response import SecretKeysResponse +from influxdb_client.domain.shard_group_manifest import ShardGroupManifest +from influxdb_client.domain.shard_manifest import ShardManifest +from influxdb_client.domain.shard_owner import ShardOwner +from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties +from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties +from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint +from influxdb_client.domain.slack_notification_rule import SlackNotificationRule +from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase +from influxdb_client.domain.source import Source +from influxdb_client.domain.source_links import SourceLinks +from influxdb_client.domain.sources import Sources +from influxdb_client.domain.stack import Stack +from influxdb_client.domain.stack_associations import StackAssociations +from influxdb_client.domain.stack_events import StackEvents +from influxdb_client.domain.stack_links import StackLinks +from influxdb_client.domain.stack_resources import StackResources +from influxdb_client.domain.statement import Statement +from influxdb_client.domain.static_legend import StaticLegend +from influxdb_client.domain.status_rule import StatusRule +from influxdb_client.domain.string_literal import StringLiteral +from influxdb_client.domain.subscription_manifest import SubscriptionManifest +from influxdb_client.domain.table_view_properties 
import TableViewProperties +from influxdb_client.domain.table_view_properties_table_options import TableViewPropertiesTableOptions +from influxdb_client.domain.tag_rule import TagRule +from influxdb_client.domain.task import Task +from influxdb_client.domain.task_create_request import TaskCreateRequest +from influxdb_client.domain.task_links import TaskLinks +from influxdb_client.domain.task_status_type import TaskStatusType +from influxdb_client.domain.task_update_request import TaskUpdateRequest +from influxdb_client.domain.tasks import Tasks +from influxdb_client.domain.telegraf import Telegraf +from influxdb_client.domain.telegraf_plugin import TelegrafPlugin +from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest +from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins +from influxdb_client.domain.telegraf_plugins import TelegrafPlugins +from influxdb_client.domain.telegraf_request import TelegrafRequest +from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata +from influxdb_client.domain.telegrafs import Telegrafs +from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint +from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule +from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase +from influxdb_client.domain.template_apply import TemplateApply +from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes +from influxdb_client.domain.template_apply_template import TemplateApplyTemplate +from influxdb_client.domain.template_chart import TemplateChart +from influxdb_client.domain.template_export_by_id import TemplateExportByID +from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs +from influxdb_client.domain.template_export_by_id_resource_filters import TemplateExportByIDResourceFilters +from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources +from influxdb_client.domain.template_kind import TemplateKind +from influxdb_client.domain.template_summary import TemplateSummary +from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff +from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets +from influxdb_client.domain.template_summary_diff_buckets_new_old import TemplateSummaryDiffBucketsNewOld +from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks +from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards +from influxdb_client.domain.template_summary_diff_dashboards_new_old import TemplateSummaryDiffDashboardsNewOld +from influxdb_client.domain.template_summary_diff_label_mappings import TemplateSummaryDiffLabelMappings +from influxdb_client.domain.template_summary_diff_labels import TemplateSummaryDiffLabels +from influxdb_client.domain.template_summary_diff_labels_new_old import TemplateSummaryDiffLabelsNewOld +from influxdb_client.domain.template_summary_diff_notification_endpoints import TemplateSummaryDiffNotificationEndpoints +from influxdb_client.domain.template_summary_diff_notification_rules import TemplateSummaryDiffNotificationRules +from influxdb_client.domain.template_summary_diff_notification_rules_new_old import TemplateSummaryDiffNotificationRulesNewOld +from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks +from 
influxdb_client.domain.template_summary_diff_tasks_new_old import TemplateSummaryDiffTasksNewOld +from influxdb_client.domain.template_summary_diff_telegraf_configs import TemplateSummaryDiffTelegrafConfigs +from influxdb_client.domain.template_summary_diff_variables import TemplateSummaryDiffVariables +from influxdb_client.domain.template_summary_diff_variables_new_old import TemplateSummaryDiffVariablesNewOld +from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors +from influxdb_client.domain.template_summary_label import TemplateSummaryLabel +from influxdb_client.domain.template_summary_label_properties import TemplateSummaryLabelProperties +from influxdb_client.domain.template_summary_summary import TemplateSummarySummary +from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets +from influxdb_client.domain.template_summary_summary_dashboards import TemplateSummarySummaryDashboards +from influxdb_client.domain.template_summary_summary_label_mappings import TemplateSummarySummaryLabelMappings +from influxdb_client.domain.template_summary_summary_notification_rules import TemplateSummarySummaryNotificationRules +from influxdb_client.domain.template_summary_summary_status_rules import TemplateSummarySummaryStatusRules +from influxdb_client.domain.template_summary_summary_tag_rules import TemplateSummarySummaryTagRules +from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks +from influxdb_client.domain.template_summary_summary_variables import TemplateSummarySummaryVariables +from influxdb_client.domain.test_statement import TestStatement +from influxdb_client.domain.threshold import Threshold +from influxdb_client.domain.threshold_base import ThresholdBase +from influxdb_client.domain.threshold_check import ThresholdCheck +from influxdb_client.domain.unary_expression import UnaryExpression +from influxdb_client.domain.unsigned_integer_literal import UnsignedIntegerLiteral +from influxdb_client.domain.user import User +from influxdb_client.domain.user_response import UserResponse +from influxdb_client.domain.user_response_links import UserResponseLinks +from influxdb_client.domain.users import Users +from influxdb_client.domain.variable import Variable +from influxdb_client.domain.variable_assignment import VariableAssignment +from influxdb_client.domain.variable_links import VariableLinks +from influxdb_client.domain.variable_properties import VariableProperties +from influxdb_client.domain.variables import Variables +from influxdb_client.domain.view import View +from influxdb_client.domain.view_links import ViewLinks +from influxdb_client.domain.view_properties import ViewProperties +from influxdb_client.domain.views import Views +from influxdb_client.domain.write_precision import WritePrecision +from influxdb_client.domain.xy_geom import XYGeom +from influxdb_client.domain.xy_view_properties import XYViewProperties + +from influxdb_client.client.authorizations_api import AuthorizationsApi +from influxdb_client.client.bucket_api import BucketsApi +from influxdb_client.client.delete_api import DeleteApi +from influxdb_client.client.invokable_scripts_api import InvokableScriptsApi +from influxdb_client.client.labels_api import LabelsApi +from influxdb_client.client.organizations_api import OrganizationsApi +from influxdb_client.client.query_api import QueryApi +from influxdb_client.client.tasks_api import TasksApi +from influxdb_client.client.users_api import UsersApi +from 
influxdb_client.client.write_api import WriteApi, WriteOptions +from influxdb_client.client.influxdb_client import InfluxDBClient +from influxdb_client.client.logging_handler import InfluxLoggingHandler +from influxdb_client.client.write.point import Point + +from influxdb_client.version import VERSION + +__version__ = VERSION diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..942daa7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/configuration.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/configuration.cpython-312.pyc new file mode 100644 index 0000000..6d72cfe Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/configuration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/extras.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/extras.cpython-312.pyc new file mode 100644 index 0000000..c2ed8f1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/extras.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/rest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/rest.cpython-312.pyc new file mode 100644 index 0000000..9ff6afc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/rest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..2f3a5f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/__pycache__/version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__init__.py new file mode 100644 index 0000000..743539c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__init__.py @@ -0,0 +1 @@ +"""Asynchronous REST APIs.""" diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2839832 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/api_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/api_client.cpython-312.pyc new file mode 100644 index 0000000..37ab0b3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/api_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/rest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/rest.cpython-312.pyc new file mode 100644 index 0000000..049c459 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/_async/__pycache__/rest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/api_client.py b/myenv/lib/python3.12/site-packages/influxdb_client/_async/api_client.py new file mode 100644 index 0000000..3a396a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_async/api_client.py @@ -0,0 +1,663 @@ +# coding: utf-8 +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import datetime +import json +import mimetypes +import os +import re +import tempfile +from multiprocessing.pool import ThreadPool +from urllib.parse import quote + +import influxdb_client.domain +from influxdb_client import SigninService +from influxdb_client import SignoutService +from influxdb_client._async import rest +from influxdb_client.configuration import Configuration +from influxdb_client.rest import _requires_create_user_session, _requires_expire_user_session + + +class ApiClientAsync(object): + """Generic API client for OpenAPI client library Build. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=None, **kwargs): + """Initialize generic API client.""" + if configuration is None: + configuration = Configuration() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObjectAsync(configuration, **kwargs) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. 
+ from influxdb_client import VERSION + self.user_agent = f'influxdb-client-python/{VERSION}' + + async def close(self): + """Dispose api client.""" + await self._signout() + await self.rest_client.close() + # Dispose pools. + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + + @property + def pool(self): + """Create the thread pool on first request to avoid instantiating an unused thread pool for blocking clients.""" + if self._pool is None: + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client.""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + """Set User agent for this API client.""" + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + """Set HTTP header for this API client.""" + self.default_headers[header_name] = header_value + + async def __call_api( + self, resource_path, method, path_params=None, + query_params=None, header_params=None, body=None, post_params=None, + files=None, response_type=None, auth_settings=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, urlopen_kw=None): + + config = self.configuration + await self._signin(resource_path=resource_path) + + # header parameters + header_params = header_params or {} + config.update_request_header_params(resource_path, header_params) + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = self.prepare_post_parameters(post_params, files) + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + + # auth setting + self.update_params_for_auth(header_params, query_params, auth_settings) + + # body + if body: + body = self.sanitize_for_serialization(body) + body = config.update_request_body(resource_path, body) + + # request url + url = self.configuration.host + resource_path + + urlopen_kw = urlopen_kw or {} + + # perform request and return response + response_data = await self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout, **urlopen_kw) + + self.last_response = response_data + + return_data = response_data + if _preload_content: + # deserialize response data + if response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None + + if _return_http_data_only is not False: + return return_data + else: + return (return_data, response_data.status, + response_data.getheaders()) +
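As an aside, the path-parameter handling in `__call_api` above amounts to template substitution plus percent-encoding through `urllib.parse.quote`. A standalone sketch; the endpoint template, values, and `safe_chars` (standing in for `config.safe_chars_for_path_param`) are made up for illustration:

from urllib.parse import quote

# Hypothetical endpoint template and values, mirroring the loop in __call_api.
resource_path = "/api/v2/buckets/{bucketID}/labels/{labelID}"
path_params = {"bucketID": "4d5c8a7e1d2f3b60", "labelID": "a label/with spaces"}
safe_chars = ""  # "specified safe chars, encode everything"

for k, v in path_params.items():
    resource_path = resource_path.replace("{%s}" % k, quote(str(v), safe=safe_chars))

print(resource_path)
# /api/v2/buckets/4d5c8a7e1d2f3b60/labels/a%20label%2Fwith%20spaces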
+ def sanitize_for_serialization(self, obj): + """Build a JSON POST object. + + If obj is None, return None. + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. + """ + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) + for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) + for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + + if isinstance(obj, dict): + obj_dict = obj + else: + # Convert a model object to a dict, dropping the + # `openapi_types`/`attribute_map` bookkeeping attributes + # and any attribute whose value is None; attribute names + # are mapped to their JSON keys via `attribute_map`. + obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in obj.openapi_types.items() + if getattr(obj, attr) is not None} + + return {key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items()} + + def deserialize(self, response, response_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + + :return: deserialized object. + """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == "file": + return self.__deserialize_file(response) + + # fetch data from response object + try: + data = json.loads(response.data) + except ValueError: + data = response.data + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith('list['): + sub_kls = re.match(r'list\[(.*)\]', klass).group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('dict('): + sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(influxdb_client.domain, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datatime(data) + else: + return self.__deserialize_model(data, klass) +
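The `klass` strings fed to `__deserialize` come from the generated service stubs; container types are peeled apart with the two regular expressions above, then the element type is resolved through `NATIVE_TYPES_MAPPING` or looked up on `influxdb_client.domain`. A quick illustration, with type strings chosen for the example:

import re

# The same patterns __deserialize uses to split container type strings.
print(re.match(r'list\[(.*)\]', 'list[Label]').group(1))               # Label
print(re.match(r'dict\(([^,]*), (.*)\)', 'dict(str, str)').group(2))   # str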
+ def call_api(self, resource_path, method, + path_params=None, query_params=None, header_params=None, + body=None, post_params=None, files=None, + response_type=None, auth_settings=None, async_req=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, urlopen_kw=None): + """Make the HTTP request (synchronous) and return deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. + :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param response_type: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param async_req bool: execute request asynchronously + :param _return_http_data_only: return the response data only, without + the status code and headers + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param urlopen_kw: Additional parameters are passed to + :meth:`urllib3.request.RequestMethods.request` + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. + """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, urlopen_kw) + else: + thread = self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, query_params, + header_params, body, + post_params, files, + response_type, auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, _request_timeout, urlopen_kw)) + return thread + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None, **urlopen_kw): + """Make the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + **urlopen_kw) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + **urlopen_kw) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "PATCH": + return
self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + else: + raise ValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def prepare_post_parameters(self, post_params=None, files=None): + """Build form parameters. + + :param post_params: Normal form parameters. + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + + if post_params: + params = post_params + + if files: + for k, v in files.items(): + if not v: + continue + file_names = v if isinstance(v, list) else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + (k, (filename, filedata, mimetype))) + + return params + + def select_header_accept(self, accepts): + """Return `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types): + """Return `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, querys, auth_settings): + """Update header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list.
+ """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if not auth_setting['value']: + continue + elif auth_setting['in'] == 'header': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file. + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datatime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. 
+ """ + if not klass.openapi_types and not hasattr(klass, + 'get_real_child_model'): + return data + + kwargs = {} + if klass.openapi_types is not None: + for attr, attr_type in klass.openapi_types.items(): + if (data is not None and + klass.attribute_map[attr] in data and + isinstance(data, (list, dict))): + value = data[klass.attribute_map[attr]] + kwargs[attr] = self.__deserialize(value, attr_type) + + instance = klass(**kwargs) + + if hasattr(instance, 'get_real_child_model'): + klass_name = instance.get_real_child_model(data) + if klass_name: + instance = self.__deserialize(data, klass_name) + return instance + + async def _signin(self, resource_path: str): + if _requires_create_user_session(self.configuration, self.cookie, resource_path): + http_info = await SigninService(self).post_signin_async(_return_http_data_only=False) + self.cookie = http_info[2]['set-cookie'] + + async def _signout(self): + if _requires_expire_user_session(self.configuration, self.cookie): + await SignoutService(self).post_signout_async() + self.cookie = None diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_async/rest.py b/myenv/lib/python3.12/site-packages/influxdb_client/_async/rest.py new file mode 100644 index 0000000..53c555c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_async/rest.py @@ -0,0 +1,309 @@ +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import io +import json +import re +import ssl +from urllib.parse import urlencode + +import aiohttp + +from influxdb_client.rest import ApiException +from influxdb_client.rest import _BaseRESTClient +from influxdb_client.rest import _UTF_8_encoding + + +async def _on_request_start(session, trace_config_ctx, params): + _BaseRESTClient.log_request(params.method, params.url) + _BaseRESTClient.log_headers(params.headers, '>>>') + + +async def _on_request_chunk_sent(session, context, params): + if params.chunk: + _BaseRESTClient.log_body(params.chunk, '>>>') + + +async def _on_request_end(session, trace_config_ctx, params): + _BaseRESTClient.log_response(params.response.status) + _BaseRESTClient.log_headers(params.headers, '<<<') + + response_content = params.response.content + data = bytearray() + while True: + chunk = await response_content.read(100) + if not chunk: + break + data += chunk + if data: + _BaseRESTClient.log_body(data.decode(_UTF_8_encoding), '<<<') + response_content.unread_data(data=data) + + +class RESTResponseAsync(io.IOBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. + """ + + def __init__(self, resp, data): + """Initialize with HTTP response.""" + self.aiohttp_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = data + + def getheaders(self): + """Return a CIMultiDictProxy of the response headers.""" + return self.aiohttp_response.headers + + def getheader(self, name, default=None): + """Return a given response header.""" + return self.aiohttp_response.headers.get(name, default) + + +class RESTClientObjectAsync(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. 
+ """ + + def __init__(self, configuration, pools_size=4, maxsize=None, **kwargs): + """Initialize REST client.""" + # maxsize is number of requests to host that are allowed in parallel + if maxsize is None: + maxsize = configuration.connection_pool_maxsize + + if configuration.ssl_context is None: + ssl_context = ssl.create_default_context(cafile=configuration.ssl_ca_cert) + if configuration.cert_file: + ssl_context.load_cert_chain( + certfile=configuration.cert_file, keyfile=configuration.cert_key_file, + password=configuration.cert_key_password + ) + + if not configuration.verify_ssl: + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + else: + ssl_context = configuration.ssl_context + + connector = aiohttp.TCPConnector( + limit=maxsize, + ssl=ssl_context + ) + + self.proxy = configuration.proxy + self.proxy_headers = configuration.proxy_headers + self.allow_redirects = kwargs.get('allow_redirects', True) + self.max_redirects = kwargs.get('max_redirects', 10) + + # configure tracing + trace_config = aiohttp.TraceConfig() + trace_config.on_request_start.append(_on_request_start) + trace_config.on_request_chunk_sent.append(_on_request_chunk_sent) + trace_config.on_request_end.append(_on_request_end) + + # timeout + if isinstance(configuration.timeout, (int, float,)): # noqa: E501,F821 + timeout = aiohttp.ClientTimeout(total=configuration.timeout / 1_000) + elif isinstance(configuration.timeout, aiohttp.ClientTimeout): + timeout = configuration.timeout + else: + timeout = aiohttp.client.DEFAULT_TIMEOUT + + # https pool manager + _client_session_type = kwargs.get('client_session_type', aiohttp.ClientSession) + _client_session_kwargs = kwargs.get('client_session_kwargs', {}) + self.pool_manager = _client_session_type( + connector=connector, + timeout=timeout, + trace_configs=[trace_config] if configuration.debug else None, + **_client_session_kwargs + ) + + async def close(self): + """Dispose connection pool manager.""" + await self.pool_manager.close() + + async def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None): + """Execute request. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: this is a non-applicable field for + the AiohttpClient. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + args = { + "method": method, + "url": url, + "headers": headers, + "allow_redirects": self.allow_redirects, + "max_redirects": self.max_redirects + } + + if self.proxy: + args["proxy"] = self.proxy + if self.proxy_headers: + args["proxy_headers"] = self.proxy_headers + + if query_params: + args["url"] += '?' 
+ urlencode(query_params) + + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if re.search('json', headers['Content-Type'], re.IGNORECASE): + if body is not None: + body = json.dumps(body) + args["data"] = body + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + args["data"] = aiohttp.FormData(post_params) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by aiohttp + del headers['Content-Type'] + data = aiohttp.FormData() + for param in post_params: + k, v = param + if isinstance(v, tuple) and len(v) == 3: + data.add_field(k, + value=v[1], + filename=v[0], + content_type=v[2]) + else: + data.add_field(k, v) + args["data"] = data + + # Pass a `bytes` parameter directly in the body to support + # other content types than Json when `body` argument is provided + # in serialized form + elif isinstance(body, bytes): + args["data"] = body + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + + r = await self.pool_manager.request(**args) + if _preload_content: + + data = await r.read() + r = RESTResponseAsync(r, data) + + if not 200 <= r.status <= 299: + raise ApiException(http_resp=r) + + return r + + async def GET(self, url, headers=None, query_params=None, + _preload_content=True, _request_timeout=None): + """Perform GET HTTP request.""" + return (await self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params)) + + async def HEAD(self, url, headers=None, query_params=None, + _preload_content=True, _request_timeout=None): + """Perform HEAD HTTP request.""" + return (await self.request("HEAD", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params)) + + async def OPTIONS(self, url, headers=None, query_params=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Perform OPTIONS HTTP request.""" + return (await self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body)) + + async def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None): + """Perform DELETE HTTP request.""" + return (await self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body)) + + async def POST(self, url, headers=None, query_params=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Perform POST HTTP request.""" + return (await self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body)) + + async def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + """Perform PUT HTTP request.""" + return (await self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + 
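# Sketch of how the multipart branch above maps a
# (filename, filedata, mimetype) triple onto aiohttp.FormData
# (made-up field values):
import aiohttp

form = aiohttp.FormData()
form.add_field('upload',
               value=b'...file bytes...',
               filename='report.csv',
               content_type='text/csv')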
_preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body)) + + async def PATCH(self, url, headers=None, query_params=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Perform PATCH HTTP request.""" + return (await self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body)) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__init__.py new file mode 100644 index 0000000..476e193 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__init__.py @@ -0,0 +1 @@ +"""Synchronous REST APIs.""" diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2778e2b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/api_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/api_client.cpython-312.pyc new file mode 100644 index 0000000..c8e5a04 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/api_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/rest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/rest.cpython-312.pyc new file mode 100644 index 0000000..394f845 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/__pycache__/rest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/api_client.py b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/api_client.py new file mode 100644 index 0000000..f61ef26 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/api_client.py @@ -0,0 +1,663 @@ +# coding: utf-8 +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import datetime +import json +import mimetypes +import os +import re +import tempfile +from multiprocessing.pool import ThreadPool +from urllib.parse import quote + +import influxdb_client.domain +from influxdb_client import SigninService +from influxdb_client import SignoutService +from influxdb_client._sync import rest +from influxdb_client.configuration import Configuration +from influxdb_client.rest import _requires_create_user_session, _requires_expire_user_session + + +class ApiClient(object): + """Generic API client for OpenAPI client library Build. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. 
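# Hedged sketch: ApiClient is typically wired up by the higher-level
# client, but it can be built directly from a Configuration (placeholder
# host and token):
#
#   from influxdb_client import Configuration
#   conf = Configuration()
#   conf.host = 'http://localhost:8086'
#   client = ApiClient(configuration=conf,
#                      header_name='Authorization',
#                      header_value='Token my-token')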
+ + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=None, retries=False): + """Initialize generic API client.""" + if configuration is None: + configuration = Configuration() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration, retries=retries) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + from influxdb_client import VERSION + self.user_agent = f'influxdb-client-python/{VERSION}' + + def __del__(self): + """Dispose pools.""" + self._signout() + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if self.rest_client and self.rest_client.pool_manager and hasattr(self.rest_client.pool_manager, 'clear'): + self.rest_client.pool_manager.clear() + + @property + def pool(self): + """Create thread pool on first request avoids instantiating unused threadpool for blocking clients.""" + if self._pool is None: + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client.""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + """Set User agent for this API client.""" + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + """Set HTTP header for this API client.""" + self.default_headers[header_name] = header_value + + def __call_api( + self, resource_path, method, path_params=None, + query_params=None, header_params=None, body=None, post_params=None, + files=None, response_type=None, auth_settings=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, urlopen_kw=None): + + config = self.configuration + self._signin(resource_path=resource_path) + + # header parameters + header_params = header_params or {} + config.update_request_header_params(resource_path, header_params) + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = 
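# Standalone sketch of the path templating above; the path, value and
# safe-character set are placeholders:
from urllib.parse import quote

resource_path = '/api/v2/buckets/{bucketID}'
resource_path = resource_path.replace('{%s}' % 'bucketID',
                                      quote(str('abc/123'), safe=''))
assert resource_path == '/api/v2/buckets/abc%2F123'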
self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = self.prepare_post_parameters(post_params, files) + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + + # auth setting + self.update_params_for_auth(header_params, query_params, auth_settings) + + # body + if body: + body = self.sanitize_for_serialization(body) + body = config.update_request_body(resource_path, body) + + # request url + url = self.configuration.host + resource_path + + urlopen_kw = urlopen_kw or {} + + # perform request and return response + response_data = self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout, **urlopen_kw) + + self.last_response = response_data + + return_data = response_data + if _preload_content: + # deserialize response data + if response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None + + if _return_http_data_only: + return (return_data) + else: + return (return_data, response_data.status, + response_data.getheaders()) + + def sanitize_for_serialization(self, obj): + """Build a JSON POST object. + + If obj is None, return None. + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. + """ + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) + for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) + for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + + if isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in obj.openapi_types.items() + if getattr(obj, attr) is not None} + + return {key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items()} + + def deserialize(self, response, response_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + + :return: deserialized object. + """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == "file": + return self.__deserialize_file(response) + + # fetch data from response object + try: + data = json.loads(response.data) + except ValueError: + data = response.data + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. 
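# Illustrative call on a hypothetical `client`: datetimes are serialized
# to ISO-8601 strings before the request body is built.
#
#   import datetime
#   client.sanitize_for_serialization(
#       {'start': datetime.datetime(2023, 1, 15, 10, 30)})
#   # -> {'start': '2023-01-15T10:30:00'}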
+ """ + if data is None: + return None + + if type(klass) == str: + if klass.startswith('list['): + sub_kls = re.match(r'list\[(.*)\]', klass).group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('dict('): + sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(influxdb_client.domain, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datatime(data) + else: + return self.__deserialize_model(data, klass) + + def call_api(self, resource_path, method, + path_params=None, query_params=None, header_params=None, + body=None, post_params=None, files=None, + response_type=None, auth_settings=None, async_req=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, urlopen_kw=None): + """Make the HTTP request (synchronous) and Return deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. + :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param response: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param urlopen_kw: Additional parameters are passed to + :meth:`urllib3.request.RequestMethods.request` + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. 
+ """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, urlopen_kw) + else: + thread = self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, query_params, + header_params, body, + post_params, files, + response_type, auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, _request_timeout, urlopen_kw)) + return thread + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None, **urlopen_kw): + """Make the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + **urlopen_kw) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + **urlopen_kw) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + else: + raise ValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. 
+ + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def prepare_post_parameters(self, post_params=None, files=None): + """Build form parameters. + + :param post_params: Normal form parameters. + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + + if post_params: + params = post_params + + if files: + for k, v in files.items(): + if not v: + continue + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + tuple([k, tuple([filename, filedata, mimetype])])) + + return params + + def select_header_accept(self, accepts): + """Return `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types): + """Return `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, querys, auth_settings): + """Update header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if not auth_setting['value']: + continue + elif auth_setting['in'] == 'header': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file. + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + :param response: RESTResponse. + :return: file path. 
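# Header negotiation above always prefers JSON; illustrative calls on a
# hypothetical `client`:
#
#   client.select_header_accept(['application/xml', 'application/json'])
#   # -> 'application/json'
#   client.select_header_accept(['application/xml', 'text/csv'])
#   # -> 'application/xml, text/csv'
#   client.select_header_content_type([])
#   # -> 'application/json' (the default)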
+ """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datatime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + if not klass.openapi_types and not hasattr(klass, + 'get_real_child_model'): + return data + + kwargs = {} + if klass.openapi_types is not None: + for attr, attr_type in klass.openapi_types.items(): + if (data is not None and + klass.attribute_map[attr] in data and + isinstance(data, (list, dict))): + value = data[klass.attribute_map[attr]] + kwargs[attr] = self.__deserialize(value, attr_type) + + instance = klass(**kwargs) + + if hasattr(instance, 'get_real_child_model'): + klass_name = instance.get_real_child_model(data) + if klass_name: + instance = self.__deserialize(data, klass_name) + return instance + + def _signin(self, resource_path: str): + if _requires_create_user_session(self.configuration, self.cookie, resource_path): + http_info = SigninService(self).post_signin_with_http_info() + self.cookie = http_info[2]['set-cookie'] + + def _signout(self): + if _requires_expire_user_session(self.configuration, self.cookie): + SignoutService(self).post_signout() + self.cookie = None diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/_sync/rest.py b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/rest.py new file mode 100644 index 0000000..eadbf06 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/_sync/rest.py @@ -0,0 +1,355 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import io +import json +import re +import ssl +from urllib.parse import urlencode + +from influxdb_client.rest import ApiException +from influxdb_client.rest import _BaseRESTClient + +try: + import urllib3 +except ImportError: + raise ImportError('OpenAPI Python client requires urllib3.') + + +class RESTResponse(io.IOBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. + """ + + def __init__(self, resp): + """Initialize with HTTP response.""" + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Return a dictionary of the response headers.""" + return self.urllib3_response.headers + + def getheader(self, name, default=None): + """Return a given response header.""" + return self.urllib3_response.headers.get(name, default) + + +class RESTClientObject(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. + """ + + def __init__(self, configuration, pools_size=4, maxsize=None, retries=False): + """Initialize REST client.""" + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + self.configuration = configuration + self.pools_size = pools_size + self.maxsize = maxsize + self.retries = retries + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + # ca_certs + if configuration.ssl_ca_cert: + ca_certs = configuration.ssl_ca_cert + else: + ca_certs = None + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + addition_pool_args['retries'] = self.retries + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy: + self.pool_manager = urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.cert_key_file, + key_password=configuration.cert_key_password, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + ssl_context=configuration.ssl_context, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.cert_key_file, + key_password=configuration.cert_key_password, + ssl_context=configuration.ssl_context, + **addition_pool_args + ) + + def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None, **urlopen_kw): + """Perform 
requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param urlopen_kw: Additional parameters are passed to + :meth:`urllib3.request.RequestMethods.request` + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + _configured_timeout = _request_timeout or self.configuration.timeout + if _configured_timeout: + if isinstance(_configured_timeout, (int, float, )): # noqa: E501,F821 + timeout = urllib3.Timeout(total=_configured_timeout / 1_000) + elif (isinstance(_configured_timeout, tuple) and + len(_configured_timeout) == 2): + timeout = urllib3.Timeout( + connect=_configured_timeout[0] / 1_000, read=_configured_timeout[1] / 1_000) + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + if self.configuration.debug: + _BaseRESTClient.log_request(method, f"{url}{'' if query_params is None else '?' + urlencode(query_params)}") + _BaseRESTClient.log_headers(headers, '>>>') + _BaseRESTClient.log_body(body, '>>>') + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if query_params: + url += '?' + urlencode(query_params) + if re.search('json', headers['Content-Type'], re.IGNORECASE): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + **urlopen_kw) + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=False, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + **urlopen_kw) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=True, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + **urlopen_kw) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + **urlopen_kw) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. 
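# Timeouts are configured in milliseconds and converted to urllib3 seconds
# as above; standalone sketch with illustrative values:
import urllib3

timeout_total = urllib3.Timeout(total=10_000 / 1_000)  # single number
timeout_pair = urllib3.Timeout(connect=5_000 / 1_000,  # (connect, read)
                               read=30_000 / 1_000)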
Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + **urlopen_kw) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if _preload_content: + r = RESTResponse(r) + + # In the python 3, the response.data is bytes. + # we need to decode it to string. + r.data = r.data.decode('utf8') + + if self.configuration.debug: + _BaseRESTClient.log_response(r.status) + if hasattr(r, 'headers'): + _BaseRESTClient.log_headers(r.headers, '<<<') + if hasattr(r, 'urllib3_response'): + _BaseRESTClient.log_headers(r.urllib3_response.headers, '<<<') + _BaseRESTClient.log_body(r.data, '<<<') + + if not 200 <= r.status <= 299: + raise ApiException(http_resp=r) + + return r + + def GET(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None, **urlopen_kw): + """Perform GET HTTP request.""" + return self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + **urlopen_kw) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None, **urlopen_kw): + """Perform HEAD HTTP request.""" + return self.request("HEAD", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + **urlopen_kw) + + def OPTIONS(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None, **urlopen_kw): + """Perform OPTIONS HTTP request.""" + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + + def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None, **urlopen_kw): + """Perform DELETE HTTP request.""" + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + + def POST(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None, **urlopen_kw): + """Perform POST HTTP request.""" + return self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + + def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None, **urlopen_kw): + """Perform PUT HTTP request.""" + return self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + **urlopen_kw) + + def PATCH(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None, **urlopen_kw): + """Perform PATCH HTTP request.""" + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + 
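# With `_preload_content=False` the raw urllib3.HTTPResponse comes back,
# so large bodies can be streamed; `rest_client` and the URL are
# hypothetical:
#
#   resp = rest_client.GET('http://localhost:8086/health',
#                          _preload_content=False)
#   for chunk in resp.stream(1024):
#       ...                      # consume incrementally
#   resp.release_conn()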
_request_timeout=_request_timeout, + body=body, + **urlopen_kw) + + def __getstate__(self): + """Return a dict of attributes that you want to pickle.""" + state = self.__dict__.copy() + # Remove the pool manager; it cannot be pickled + del state['pool_manager'] + return state + + def __setstate__(self, state): + """Restore the object from the provided dict.""" + self.__dict__.update(state) + # Re-initialize the pool manager + self.__init__(self.configuration, self.pools_size, self.maxsize, self.retries) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/__init__.py new file mode 100644 index 0000000..0d21a43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/__init__.py @@ -0,0 +1,56 @@ +# flake8: noqa + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import apis into api package +from influxdb_client.service.authorizations_service import AuthorizationsService +from influxdb_client.service.backup_service import BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService +from influxdb_client.service.cells_service import CellsService +from influxdb_client.service.checks_service import ChecksService +from influxdb_client.service.config_service import ConfigService +from influxdb_client.service.dbr_ps_service import DBRPsService +from influxdb_client.service.dashboards_service import DashboardsService +from influxdb_client.service.delete_service import DeleteService +from influxdb_client.service.health_service import HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService +from influxdb_client.service.ping_service import PingService +from influxdb_client.service.query_service import QueryService +from influxdb_client.service.ready_service import ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService +from influxdb_client.service.resources_service import ResourcesService +from influxdb_client.service.restore_service import RestoreService +from influxdb_client.service.routes_service import RoutesService +from influxdb_client.service.rules_service import RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService +from influxdb_client.service.setup_service import SetupService +from influxdb_client.service.signin_service import SigninService +from influxdb_client.service.signout_service import SignoutService +from influxdb_client.service.sources_service import SourcesService +from 
influxdb_client.service.tasks_service import TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService +from influxdb_client.service.templates_service import TemplatesService +from influxdb_client.service.users_service import UsersService +from influxdb_client.service.variables_service import VariablesService +from influxdb_client.service.views_service import ViewsService +from influxdb_client.service.write_service import WriteService diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..514a232 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_base.cpython-312.pyc new file mode 100644 index 0000000..1719045 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_pages.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_pages.cpython-312.pyc new file mode 100644 index 0000000..f4b8d92 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/_pages.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/authorizations_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/authorizations_api.cpython-312.pyc new file mode 100644 index 0000000..3de1b02 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/authorizations_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/bucket_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/bucket_api.cpython-312.pyc new file mode 100644 index 0000000..c6bac67 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/bucket_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api.cpython-312.pyc new file mode 100644 index 0000000..3d809f8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api_async.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api_async.cpython-312.pyc new file mode 100644 index 0000000..f9d8008 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/delete_api_async.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..9e41de8 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_csv_parser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_csv_parser.cpython-312.pyc new file mode 100644 index 0000000..8e26913 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_csv_parser.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_table.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_table.cpython-312.pyc new file mode 100644 index 0000000..0700003 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/flux_table.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client.cpython-312.pyc new file mode 100644 index 0000000..ed5e2df Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client_async.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client_async.cpython-312.pyc new file mode 100644 index 0000000..a51aaee Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/influxdb_client_async.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/invokable_scripts_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/invokable_scripts_api.cpython-312.pyc new file mode 100644 index 0000000..8469dda Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/invokable_scripts_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/labels_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/labels_api.cpython-312.pyc new file mode 100644 index 0000000..24e0e9d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/labels_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/logging_handler.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/logging_handler.cpython-312.pyc new file mode 100644 index 0000000..a8d8b34 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/logging_handler.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/organizations_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/organizations_api.cpython-312.pyc new file mode 100644 index 0000000..5489ca2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/organizations_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api.cpython-312.pyc new file mode 100644 
index 0000000..49727fb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api_async.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api_async.cpython-312.pyc new file mode 100644 index 0000000..9125e2e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/query_api_async.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/tasks_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/tasks_api.cpython-312.pyc new file mode 100644 index 0000000..dd5b31b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/tasks_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/users_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/users_api.cpython-312.pyc new file mode 100644 index 0000000..7efa494 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/users_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/warnings.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/warnings.cpython-312.pyc new file mode 100644 index 0000000..e9e6342 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/warnings.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api.cpython-312.pyc new file mode 100644 index 0000000..1770232 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api_async.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api_async.cpython-312.pyc new file mode 100644 index 0000000..a888e3d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/__pycache__/write_api_async.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/_base.py new file mode 100644 index 0000000..8dcf75e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/_base.py @@ -0,0 +1,554 @@ +"""Commons function for Sync and Async client.""" +from __future__ import absolute_import + +import base64 +import configparser +import logging +import os +from datetime import datetime, timedelta +from typing import List, Generator, Any, Union, Iterable, AsyncGenerator + +from urllib3 import HTTPResponse + +from influxdb_client import Configuration, Dialect, Query, OptionStatement, VariableAssignment, Identifier, \ + Expression, BooleanLiteral, IntegerLiteral, FloatLiteral, DateTimeLiteral, UnaryExpression, DurationLiteral, \ + Duration, StringLiteral, ArrayExpression, ImportDeclaration, MemberExpression, MemberAssignment, File, \ + WriteService, QueryService, DeleteService, DeletePredicateRequest +from influxdb_client.client.flux_csv_parser import 
FluxResponseMetadataMode, FluxCsvParser, FluxSerializationMode +from influxdb_client.client.flux_table import FluxRecord, TableList, CSVIterator +from influxdb_client.client.util.date_utils import get_date_helper +from influxdb_client.client.util.helpers import get_org_query_param +from influxdb_client.client.warnings import MissingPivotFunction +from influxdb_client.client.write.dataframe_serializer import DataframeSerializer +from influxdb_client.rest import _UTF_8_encoding + +try: + import dataclasses + + _HAS_DATACLASS = True +except ModuleNotFoundError: + _HAS_DATACLASS = False + +LOGGERS_NAMES = [ + 'influxdb_client.client.influxdb_client', + 'influxdb_client.client.influxdb_client_async', + 'influxdb_client.client.write_api', + 'influxdb_client.client.write_api_async', + 'influxdb_client.client.write.retry', + 'influxdb_client.client.write.dataframe_serializer', + 'influxdb_client.client.util.multiprocessing_helper', + 'influxdb_client.client.http', + 'influxdb_client.client.exceptions', +] + + +# noinspection PyMethodMayBeStatic +class _BaseClient(object): + def __init__(self, url, token, debug=None, timeout=10_000, enable_gzip=False, org: str = None, + default_tags: dict = None, http_client_logger: str = None, **kwargs) -> None: + self.url = url + self.token = token + self.org = org + + self.default_tags = default_tags + + self.conf = _Configuration() + if self.url.endswith("/"): + self.conf.host = self.url[:-1] + else: + self.conf.host = self.url + self.conf.enable_gzip = enable_gzip + self.conf.verify_ssl = kwargs.get('verify_ssl', True) + self.conf.ssl_ca_cert = kwargs.get('ssl_ca_cert', None) + self.conf.cert_file = kwargs.get('cert_file', None) + self.conf.cert_key_file = kwargs.get('cert_key_file', None) + self.conf.cert_key_password = kwargs.get('cert_key_password', None) + self.conf.ssl_context = kwargs.get('ssl_context', None) + self.conf.proxy = kwargs.get('proxy', None) + self.conf.proxy_headers = kwargs.get('proxy_headers', None) + self.conf.connection_pool_maxsize = kwargs.get('connection_pool_maxsize', self.conf.connection_pool_maxsize) + self.conf.timeout = timeout + # logging + self.conf.loggers["http_client_logger"] = logging.getLogger(http_client_logger) + for client_logger in LOGGERS_NAMES: + self.conf.loggers[client_logger] = logging.getLogger(client_logger) + self.conf.debug = debug + + self.conf.username = kwargs.get('username', None) + self.conf.password = kwargs.get('password', None) + # defaults + self.auth_header_name = None + self.auth_header_value = None + # by token + if self.token: + self.auth_header_name = "Authorization" + self.auth_header_value = "Token " + self.token + # by HTTP basic + auth_basic = kwargs.get('auth_basic', False) + if auth_basic: + self.auth_header_name = "Authorization" + self.auth_header_value = "Basic " + base64.b64encode(token.encode()).decode() + # by username, password + if self.conf.username and self.conf.password: + self.auth_header_name = None + self.auth_header_value = None + + self.retries = kwargs.get('retries', False) + + self.profilers = kwargs.get('profilers', None) + pass + + @classmethod + def _from_config_file(cls, config_file: str = "config.ini", debug=None, enable_gzip=False, **kwargs): + config = configparser.ConfigParser() + config_name = kwargs.get('config_name', 'influx2') + is_json = False + try: + config.read(config_file) + except configparser.ParsingError: + with open(config_file) as json_file: + import json + config = json.load(json_file) + is_json = True + + def _config_value(key: str): + value = 
str(config[key]) if is_json else config[config_name][key] + return value.strip('"') + + def _has_option(key: str): + return key in config if is_json else config.has_option(config_name, key) + + def _has_section(key: str): + return key in config if is_json else config.has_section(key) + + url = _config_value('url') + token = _config_value('token') + + timeout = None + if _has_option('timeout'): + timeout = _config_value('timeout') + + org = None + if _has_option('org'): + org = _config_value('org') + + verify_ssl = True + if _has_option('verify_ssl'): + verify_ssl = _config_value('verify_ssl') + + ssl_ca_cert = None + if _has_option('ssl_ca_cert'): + ssl_ca_cert = _config_value('ssl_ca_cert') + + cert_file = None + if _has_option('cert_file'): + cert_file = _config_value('cert_file') + + cert_key_file = None + if _has_option('cert_key_file'): + cert_key_file = _config_value('cert_key_file') + + cert_key_password = None + if _has_option('cert_key_password'): + cert_key_password = _config_value('cert_key_password') + + connection_pool_maxsize = None + if _has_option('connection_pool_maxsize'): + connection_pool_maxsize = _config_value('connection_pool_maxsize') + + auth_basic = False + if _has_option('auth_basic'): + auth_basic = _config_value('auth_basic') + + default_tags = None + if _has_section('tags'): + if is_json: + default_tags = config['tags'] + else: + tags = {k: v.strip('"') for k, v in config.items('tags')} + default_tags = dict(tags) + + profilers = None + if _has_option('profilers'): + profilers = [x.strip() for x in _config_value('profilers').split(',')] + + proxy = None + if _has_option('proxy'): + proxy = _config_value('proxy') + + return cls(url, token, debug=debug, timeout=_to_int(timeout), org=org, default_tags=default_tags, + enable_gzip=enable_gzip, verify_ssl=_to_bool(verify_ssl), ssl_ca_cert=ssl_ca_cert, + cert_file=cert_file, cert_key_file=cert_key_file, cert_key_password=cert_key_password, + connection_pool_maxsize=_to_int(connection_pool_maxsize), auth_basic=_to_bool(auth_basic), + profilers=profilers, proxy=proxy, **kwargs) + + @classmethod + def _from_env_properties(cls, debug=None, enable_gzip=False, **kwargs): + url = os.getenv('INFLUXDB_V2_URL', "http://localhost:8086") + token = os.getenv('INFLUXDB_V2_TOKEN', "my-token") + timeout = os.getenv('INFLUXDB_V2_TIMEOUT', "10000") + org = os.getenv('INFLUXDB_V2_ORG', "my-org") + verify_ssl = os.getenv('INFLUXDB_V2_VERIFY_SSL', "True") + ssl_ca_cert = os.getenv('INFLUXDB_V2_SSL_CA_CERT', None) + cert_file = os.getenv('INFLUXDB_V2_CERT_FILE', None) + cert_key_file = os.getenv('INFLUXDB_V2_CERT_KEY_FILE', None) + cert_key_password = os.getenv('INFLUXDB_V2_CERT_KEY_PASSWORD', None) + connection_pool_maxsize = os.getenv('INFLUXDB_V2_CONNECTION_POOL_MAXSIZE', None) + auth_basic = os.getenv('INFLUXDB_V2_AUTH_BASIC', "False") + + prof = os.getenv("INFLUXDB_V2_PROFILERS", None) + profilers = None + if prof is not None: + profilers = [x.strip() for x in prof.split(',')] + + default_tags = dict() + + for key, value in os.environ.items(): + if key.startswith("INFLUXDB_V2_TAG_"): + default_tags[key[16:].lower()] = value + + return cls(url, token, debug=debug, timeout=_to_int(timeout), org=org, default_tags=default_tags, + enable_gzip=enable_gzip, verify_ssl=_to_bool(verify_ssl), ssl_ca_cert=ssl_ca_cert, + cert_file=cert_file, cert_key_file=cert_key_file, cert_key_password=cert_key_password, + connection_pool_maxsize=_to_int(connection_pool_maxsize), auth_basic=_to_bool(auth_basic), + profilers=profilers, **kwargs) + + +# 
noinspection PyMethodMayBeStatic +class _BaseQueryApi(object): + default_dialect = Dialect(header=True, delimiter=",", comment_prefix="#", + annotations=["datatype", "group", "default"], date_time_format="RFC3339") + + def __init__(self, influxdb_client, query_options=None): + from influxdb_client.client.query_api import QueryOptions + self._query_options = QueryOptions() if query_options is None else query_options + self._influxdb_client = influxdb_client + self._query_api = QueryService(influxdb_client.api_client) + + """Base implementation for Queryable API.""" + + def _to_tables(self, response, query_options=None, response_metadata_mode: + FluxResponseMetadataMode = FluxResponseMetadataMode.full) -> TableList: + """ + Parse HTTP response to TableList. + + :param response: HTTP response from an HTTP client. Expected type: `urllib3.response.HTTPResponse`. + """ + _parser = self._to_tables_parser(response, query_options, response_metadata_mode) + list(_parser.generator()) + return _parser.table_list() + + async def _to_tables_async(self, response, query_options=None, response_metadata_mode: + FluxResponseMetadataMode = FluxResponseMetadataMode.full) -> TableList: + """ + Parse HTTP response to TableList. + + :param response: HTTP response from an HTTP client. Expected type: `aiohttp.client_reqrep.ClientResponse`. + """ + async with self._to_tables_parser(response, query_options, response_metadata_mode) as parser: + async for _ in parser.generator_async(): + pass + return parser.table_list() + + def _to_csv(self, response: HTTPResponse) -> CSVIterator: + """Parse HTTP response to CSV.""" + return CSVIterator(response) + + def _to_flux_record_stream(self, response, query_options=None, + response_metadata_mode: FluxResponseMetadataMode = FluxResponseMetadataMode.full) -> \ + Generator[FluxRecord, Any, None]: + """ + Parse HTTP response to FluxRecord stream. + + :param response: HTTP response from an HTTP client. Expected type: `urllib3.response.HTTPResponse`. + """ + _parser = self._to_flux_record_stream_parser(query_options, response, response_metadata_mode) + return _parser.generator() + + async def _to_flux_record_stream_async(self, response, query_options=None, response_metadata_mode: + FluxResponseMetadataMode = FluxResponseMetadataMode.full) -> \ + AsyncGenerator['FluxRecord', None]: + """ + Parse HTTP response to FluxRecord stream. + + :param response: HTTP response from an HTTP client. Expected type: `aiohttp.client_reqrep.ClientResponse`. + """ + _parser = self._to_flux_record_stream_parser(query_options, response, response_metadata_mode) + return (await _parser.__aenter__()).generator_async() + + def _to_data_frame_stream(self, data_frame_index, response, query_options=None, + response_metadata_mode: FluxResponseMetadataMode = FluxResponseMetadataMode.full, + use_extension_dtypes=False): + """ + Parse HTTP response to DataFrame stream. + + :param response: HTTP response from an HTTP client. Expected type: `urllib3.response.HTTPResponse`. + """ + _parser = self._to_data_frame_stream_parser(data_frame_index, query_options, response, response_metadata_mode, + use_extension_dtypes) + return _parser.generator() + + async def _to_data_frame_stream_async(self, data_frame_index, response, query_options=None, response_metadata_mode: + FluxResponseMetadataMode = FluxResponseMetadataMode.full, + use_extension_dtypes=False): + """ + Parse HTTP response to DataFrame stream. + + :param response: HTTP response from an HTTP client. Expected type: `aiohttp.client_reqrep.ClientResponse`. 
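As a minimal sketch of how these internals surface through the public ``QueryApi`` (the URL, token, org and bucket are placeholders, not values from this changeset; ``params`` values are serialized into the Flux extern AST by ``_params_to_extern_ast``/``_parm_to_extern_ast`` below):

.. code-block:: python

    from datetime import timedelta

    from influxdb_client import InfluxDBClient

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        query_api = client.query_api()
        # Python values bound to Flux variables via the extern AST
        tables = query_api.query(
            'from(bucket: _bucket) |> range(start: _start)',
            params={"_bucket": "my-bucket", "_start": timedelta(hours=-1)})
        for table in tables:
            for record in table.records:
                print(record.get_time(), record.get_value())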
+ """ + _parser = self._to_data_frame_stream_parser(data_frame_index, query_options, response, response_metadata_mode, + use_extension_dtypes) + return (await _parser.__aenter__()).generator_async() + + def _to_tables_parser(self, response, query_options, response_metadata_mode): + return FluxCsvParser(response=response, serialization_mode=FluxSerializationMode.tables, + query_options=query_options, response_metadata_mode=response_metadata_mode) + + def _to_flux_record_stream_parser(self, query_options, response, response_metadata_mode): + return FluxCsvParser(response=response, serialization_mode=FluxSerializationMode.stream, + query_options=query_options, response_metadata_mode=response_metadata_mode) + + def _to_data_frame_stream_parser(self, data_frame_index, query_options, response, response_metadata_mode, + use_extension_dtypes): + return FluxCsvParser(response=response, serialization_mode=FluxSerializationMode.dataFrame, + data_frame_index=data_frame_index, query_options=query_options, + response_metadata_mode=response_metadata_mode, + use_extension_dtypes=use_extension_dtypes) + + def _to_data_frames(self, _generator): + """Parse stream of DataFrames into expected type.""" + from ..extras import pd + if isinstance(_generator, list): + _dataFrames = _generator + else: + _dataFrames = list(_generator) + + if len(_dataFrames) == 0: + return pd.DataFrame(columns=[], index=None) + elif len(_dataFrames) == 1: + return _dataFrames[0] + else: + return _dataFrames + + def _org_param(self, org): + return get_org_query_param(org=org, client=self._influxdb_client) + + def _get_query_options(self): + if self._query_options and self._query_options.profilers: + return self._query_options + elif self._influxdb_client.profilers: + from influxdb_client.client.query_api import QueryOptions + return QueryOptions(profilers=self._influxdb_client.profilers) + + def _create_query(self, query, dialect=default_dialect, params: dict = None, **kwargs): + query_options = self._get_query_options() + profilers = query_options.profilers if query_options is not None else None + q = Query(query=query, dialect=dialect, extern=_BaseQueryApi._build_flux_ast(params, profilers)) + + if profilers: + print("\n===============") + print("Profiler: query") + print("===============") + print(query) + + if kwargs.get('dataframe_query', False): + MissingPivotFunction.print_warning(query) + + return q + + @staticmethod + def _params_to_extern_ast(params: dict) -> List['OptionStatement']: + + statements = [] + for key, value in params.items(): + expression = _BaseQueryApi._parm_to_extern_ast(value) + if expression is None: + continue + + statements.append(OptionStatement("OptionStatement", + VariableAssignment("VariableAssignment", Identifier("Identifier", key), + expression))) + return statements + + @staticmethod + def _parm_to_extern_ast(value) -> Union[Expression, None]: + if value is None: + return None + if isinstance(value, bool): + return BooleanLiteral("BooleanLiteral", value) + elif isinstance(value, int): + return IntegerLiteral("IntegerLiteral", str(value)) + elif isinstance(value, float): + return FloatLiteral("FloatLiteral", value) + elif isinstance(value, datetime): + value = get_date_helper().to_utc(value) + nanoseconds = getattr(value, 'nanosecond', 0) + fraction = f'{(value.microsecond * 1000 + nanoseconds):09d}' + return DateTimeLiteral("DateTimeLiteral", value.strftime('%Y-%m-%dT%H:%M:%S.') + fraction + 'Z') + elif isinstance(value, timedelta): + _micro_delta = int(value / timedelta(microseconds=1)) + if 
_micro_delta < 0: + return UnaryExpression("UnaryExpression", argument=DurationLiteral("DurationLiteral", [ + Duration(magnitude=-_micro_delta, unit="us")]), operator="-") + else: + return DurationLiteral("DurationLiteral", [Duration(magnitude=_micro_delta, unit="us")]) + elif isinstance(value, str): + return StringLiteral("StringLiteral", str(value)) + elif isinstance(value, Iterable): + return ArrayExpression("ArrayExpression", + elements=list(map(lambda it: _BaseQueryApi._parm_to_extern_ast(it), value))) + else: + return value + + @staticmethod + def _build_flux_ast(params: dict = None, profilers: List[str] = None): + + imports = [] + body = [] + + if profilers is not None and len(profilers) > 0: + imports.append(ImportDeclaration( + "ImportDeclaration", + path=StringLiteral("StringLiteral", "profiler"))) + + elements = [] + for profiler in profilers: + elements.append(StringLiteral("StringLiteral", value=profiler)) + + member = MemberExpression( + "MemberExpression", + object=Identifier("Identifier", "profiler"), + _property=Identifier("Identifier", "enabledProfilers")) + + prof = OptionStatement( + "OptionStatement", + assignment=MemberAssignment( + "MemberAssignment", + member=member, + init=ArrayExpression( + "ArrayExpression", + elements=elements))) + + body.append(prof) + + if params is not None: + body.extend(_BaseQueryApi._params_to_extern_ast(params)) + + return File(package=None, name=None, type=None, imports=imports, body=body) + + +class _BaseWriteApi(object): + def __init__(self, influxdb_client, point_settings=None): + self._influxdb_client = influxdb_client + self._point_settings = point_settings + self._write_service = WriteService(influxdb_client.api_client) + if influxdb_client.default_tags: + for key, value in influxdb_client.default_tags.items(): + self._point_settings.add_default_tag(key, value) + + def _append_default_tag(self, key, val, record): + from influxdb_client import Point + if isinstance(record, bytes) or isinstance(record, str): + pass + elif isinstance(record, Point): + record.tag(key, val) + elif isinstance(record, dict): + record.setdefault("tags", {}) + record.get("tags")[key] = val + elif isinstance(record, Iterable): + for item in record: + self._append_default_tag(key, val, item) + + def _append_default_tags(self, record): + if self._point_settings.defaultTags and record is not None: + for key, val in self._point_settings.defaultTags.items(): + self._append_default_tag(key, val, record) + + def _serialize(self, record, write_precision, payload, **kwargs): + from influxdb_client import Point + if isinstance(record, bytes): + payload[write_precision].append(record) + + elif isinstance(record, str): + self._serialize(record.encode(_UTF_8_encoding), write_precision, payload, **kwargs) + + elif isinstance(record, Point): + precision_from_point = kwargs.get('precision_from_point', True) + precision = record.write_precision if precision_from_point else write_precision + self._serialize(record.to_line_protocol(precision=precision), precision, payload, **kwargs) + + elif isinstance(record, dict): + self._serialize(Point.from_dict(record, write_precision=write_precision, **kwargs), + write_precision, payload, **kwargs) + elif 'DataFrame' in type(record).__name__: + serializer = DataframeSerializer(record, self._point_settings, write_precision, **kwargs) + self._serialize(serializer.serialize(), write_precision, payload, **kwargs) + elif hasattr(record, "_asdict"): + # noinspection PyProtectedMember + self._serialize(record._asdict(), write_precision, 
payload, **kwargs) + elif _HAS_DATACLASS and dataclasses.is_dataclass(record): + self._serialize(dataclasses.asdict(record), write_precision, payload, **kwargs) + elif isinstance(record, Iterable): + for item in record: + self._serialize(item, write_precision, payload, **kwargs) + + +# noinspection PyMethodMayBeStatic +class _BaseDeleteApi(object): + def __init__(self, influxdb_client): + self._influxdb_client = influxdb_client + self._service = DeleteService(influxdb_client.api_client) + + def _prepare_predicate_request(self, start, stop, predicate): + date_helper = get_date_helper() + if isinstance(start, datetime): + start = date_helper.to_utc(start) + if isinstance(stop, datetime): + stop = date_helper.to_utc(stop) + predicate_request = DeletePredicateRequest(start=start, stop=stop, predicate=predicate) + return predicate_request + + +class _Configuration(Configuration): + def __init__(self): + Configuration.__init__(self) + self.enable_gzip = False + self.username = None + self.password = None + + def update_request_header_params(self, path: str, params: dict): + super().update_request_header_params(path, params) + if self.enable_gzip: + # GZIP Request + if path == '/api/v2/write': + params["Content-Encoding"] = "gzip" + params["Accept-Encoding"] = "identity" + pass + # GZIP Response + if path == '/api/v2/query': + # params["Content-Encoding"] = "gzip" + params["Accept-Encoding"] = "gzip" + pass + pass + pass + + def update_request_body(self, path: str, body): + _body = super().update_request_body(path, body) + if self.enable_gzip: + # GZIP Request + if path == '/api/v2/write': + import gzip + if isinstance(_body, bytes): + return gzip.compress(data=_body) + else: + return gzip.compress(bytes(_body, _UTF_8_encoding)) + + return _body + + +def _to_bool(bool_value): + return str(bool_value).lower() in ("yes", "true") + + +def _to_int(int_value): + return int(int_value) if int_value is not None else None diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/_pages.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/_pages.py new file mode 100644 index 0000000..5e41842 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/_pages.py @@ -0,0 +1,66 @@ + + +class _Page: + def __init__(self, values, has_next, next_after): + self.has_next = has_next + self.values = values + self.next_after = next_after + + @staticmethod + def empty(): + return _Page([], False, None) + + @staticmethod + def initial(after): + return _Page([], True, after) + + +class _PageIterator: + def __init__(self, page: _Page, get_next_page): + self.page = page + self.get_next_page = get_next_page + + def __iter__(self): + return self + + def __next__(self): + if not self.page.values: + if self.page.has_next: + self.page = self.get_next_page(self.page) + if not self.page.values: + raise StopIteration + return self.page.values.pop(0) + + +class _Paginated: + def __init__(self, paginated_getter, pluck_page_resources_from_response): + self.paginated_getter = paginated_getter + self.pluck_page_resources_from_response = pluck_page_resources_from_response + + def find_iter(self, **kwargs): + """Iterate over resources with pagination. + + :key str org: The organization name. + :key str org_id: The organization ID. + :key str after: The last resource ID from which to seek from (but not including). 
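A hedged usage sketch for the record serializer above: ``_BaseWriteApi._serialize`` accepts bytes, strings, ``Point`` objects, dicts, DataFrames, named tuples and dataclasses, and all of them funnel into the same payload. Connection values and the bucket name are placeholders:

.. code-block:: python

    from influxdb_client import InfluxDBClient, Point
    from influxdb_client.client.exceptions import InfluxDBError
    from influxdb_client.client.write_api import SYNCHRONOUS

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        write_api = client.write_api(write_options=SYNCHRONOUS)
        try:
            # each record shape below passes through _serialize()
            write_api.write(bucket="my-bucket", record=[
                Point("mem").tag("host", "host1").field("used_percent", 23.4),
                {"measurement": "mem", "tags": {"host": "host2"},
                 "fields": {"used_percent": 41.2}},
                "mem,host=host3 used_percent=21.8",
            ])
        except InfluxDBError as e:
            print(f"write failed: {e.message} (Retry-After: {e.retry_after})")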
+ :key int limit: the maximum number of items per page + :return: resources iterator + """ + + def get_next_page(page: _Page): + return self._find_next_page(page, **kwargs) + + return iter(_PageIterator(_Page.initial(kwargs.get('after')), get_next_page)) + + def _find_next_page(self, page: _Page, **kwargs): + if not page.has_next: + return _Page.empty() + + kw_args = {**kwargs, 'after': page.next_after} if page.next_after is not None else kwargs + response = self.paginated_getter(**kw_args) + + resources = self.pluck_page_resources_from_response(response) + has_next = response.links.next is not None + last_id = resources[-1].id if resources else None + + return _Page(resources, has_next, last_id) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/authorizations_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/authorizations_api.py new file mode 100644 index 0000000..b7179b6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/authorizations_api.py @@ -0,0 +1,134 @@ +"""Authorization is about managing the security of your InfluxDB instance.""" + +from influxdb_client import Authorization, AuthorizationsService, User, Organization + + +class AuthorizationsApi(object): + """Implementation for '/api/v2/authorizations' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + self._influxdb_client = influxdb_client + self._authorizations_service = AuthorizationsService(influxdb_client.api_client) + + def create_authorization(self, org_id=None, permissions: list = None, + authorization: Authorization = None) -> Authorization: + """ + Create an authorization. + + :type permissions: list of Permission + :param org_id: organization id + :param permissions: list of permissions + :type authorization: authorization object + + """ + if authorization is not None: + return self._authorizations_service.post_authorizations(authorization_post_request=authorization) + + # if org_id is not None and permissions is not None: + authorization = Authorization(org_id=org_id, permissions=permissions) + return self._authorizations_service.post_authorizations(authorization_post_request=authorization) + + def find_authorization_by_id(self, auth_id: str) -> Authorization: + """ + Find authorization by id. + + :param auth_id: authorization id + :return: Authorization + """ + return self._authorizations_service.get_authorizations_id(auth_id=auth_id) + + def find_authorizations(self, **kwargs): + """ + Get a list of all authorizations. + + :key str user_id: filter authorizations belonging to a user id + :key str user: filter authorizations belonging to a user name + :key str org_id: filter authorizations belonging to a org id + :key str org: filter authorizations belonging to a org name + :return: Authorizations + """ + authorizations = self._authorizations_service.get_authorizations(**kwargs) + + return authorizations.authorizations + + def find_authorizations_by_user(self, user: User): + """ + Find authorization by User. + + :return: Authorization list + """ + return self.find_authorizations(user_id=user.id) + + def find_authorizations_by_user_id(self, user_id: str): + """ + Find authorization by user id. + + :return: Authorization list + """ + return self.find_authorizations(user_id=user_id) + + def find_authorizations_by_user_name(self, user_name: str): + """ + Find authorization by user name. 
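To illustrate how ``_Page``/``_PageIterator``/``_Paginated`` above are driven from a public API, a sketch using the buckets endpoint (connection values are placeholders):

.. code-block:: python

    from influxdb_client import InfluxDBClient

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        # each exhausted page triggers _find_next_page() with the last
        # resource id as the `after` cursor
        for bucket in client.buckets_api().find_buckets_iter(limit=20):
            print(bucket.id, bucket.name)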
+ + :return: Authorization list + """ + return self.find_authorizations(user=user_name) + + def find_authorizations_by_org(self, org: Organization): + """ + Find authorization by organization. + + :return: Authorization list + """ + if isinstance(org, Organization): + return self.find_authorizations(org_id=org.id) + + def find_authorizations_by_org_name(self, org_name: str): + """ + Find authorization by org name. + + :return: Authorization list + """ + return self.find_authorizations(org=org_name) + + def find_authorizations_by_org_id(self, org_id: str): + """ + Find authorization by org id. + + :return: Authorization list + """ + return self.find_authorizations(org_id=org_id) + + def update_authorization(self, auth): + """ + Update authorization object. + + :param auth: authorization to update + :return: updated Authorization + """ + return self._authorizations_service.patch_authorizations_id(auth_id=auth.id, authorization_update_request=auth) + + def clone_authorization(self, auth) -> Authorization: + """Clone an authorization.""" + if isinstance(auth, Authorization): + cloned = Authorization(org_id=auth.org_id, permissions=auth.permissions) + # cloned.description = auth.description + # cloned.status = auth.status + return self.create_authorization(authorization=cloned) + + if isinstance(auth, str): + authorization = self.find_authorization_by_id(auth) + return self.clone_authorization(auth=authorization) + + raise ValueError("Invalid argument") + + def delete_authorization(self, auth): + """Delete an authorization.""" + if isinstance(auth, Authorization): + return self._authorizations_service.delete_authorizations_id(auth_id=auth.id) + + if isinstance(auth, str): + return self._authorizations_service.delete_authorizations_id(auth_id=auth) + raise ValueError("Invalid argument") diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/bucket_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/bucket_api.py new file mode 100644 index 0000000..684da76 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/bucket_api.py @@ -0,0 +1,132 @@ +""" +A bucket is a named location where time series data is stored. + +All buckets have a retention policy, a duration of time that each data point persists. +A bucket belongs to an organization. +""" +import warnings + +from influxdb_client import BucketsService, Bucket, PostBucketRequest, PatchBucketRequest +from influxdb_client.client.util.helpers import get_org_query_param +from influxdb_client.client._pages import _Paginated + + +class BucketsApi(object): + """Implementation for '/api/v2/buckets' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + self._influxdb_client = influxdb_client + self._buckets_service = BucketsService(influxdb_client.api_client) + + def create_bucket(self, bucket=None, bucket_name=None, org_id=None, retention_rules=None, + description=None, org=None) -> Bucket: + """Create a bucket. + + :param Bucket|PostBucketRequest bucket: bucket to create + :param bucket_name: bucket name + :param description: bucket description + :param org_id: org_id + :param retention_rules: retention rules array or single BucketRetentionRules + :param str, Organization org: specifies the organization for creating the bucket; + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClient.org`` is used. + :return: Bucket + If the method is called asynchronously, + returns the request thread.
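A sketch of ``create_authorization`` above with an explicit permission list. ``Permission`` and ``PermissionResource`` are the generated models exported at the package top level; the org id and connection values are placeholders:

.. code-block:: python

    from influxdb_client import InfluxDBClient, Permission, PermissionResource

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        resource = PermissionResource(type="buckets", org_id="<org-id>")
        authorization = client.authorizations_api().create_authorization(
            org_id="<org-id>",
            permissions=[Permission(action="read", resource=resource),
                         Permission(action="write", resource=resource)])
        print(authorization.token)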
+ """ + if retention_rules is None: + retention_rules = [] + + rules = [] + + if isinstance(retention_rules, list): + rules.extend(retention_rules) + else: + rules.append(retention_rules) + + if org_id is not None: + warnings.warn("org_id is deprecated; use org", DeprecationWarning) + + if bucket is None: + bucket = PostBucketRequest(name=bucket_name, + retention_rules=rules, + description=description, + org_id=get_org_query_param(org=(org_id if org is None else org), + client=self._influxdb_client, + required_id=True)) + + return self._buckets_service.post_buckets(post_bucket_request=bucket) + + def update_bucket(self, bucket: Bucket) -> Bucket: + """Update a bucket. + + :param bucket: Bucket update to apply (required) + :return: Bucket + """ + request = PatchBucketRequest(name=bucket.name, + description=bucket.description, + retention_rules=bucket.retention_rules) + + return self._buckets_service.patch_buckets_id(bucket_id=bucket.id, patch_bucket_request=request) + + def delete_bucket(self, bucket): + """Delete a bucket. + + :param bucket: bucket id or Bucket + :return: Bucket + """ + if isinstance(bucket, Bucket): + bucket_id = bucket.id + else: + bucket_id = bucket + + return self._buckets_service.delete_buckets_id(bucket_id=bucket_id) + + def find_bucket_by_id(self, id): + """Find bucket by ID. + + :param id: + :return: + """ + return self._buckets_service.get_buckets_id(id) + + def find_bucket_by_name(self, bucket_name): + """Find bucket by name. + + :param bucket_name: bucket name + :return: Bucket + """ + buckets = self._buckets_service.get_buckets(name=bucket_name) + + if len(buckets.buckets) > 0: + return buckets.buckets[0] + else: + return None + + def find_buckets(self, **kwargs): + """List buckets. + + :key int offset: Offset for pagination + :key int limit: Limit for pagination + :key str after: The last resource ID from which to seek from (but not including). + This is to be used instead of `offset`. + :key str org: The organization name. + :key str org_id: The organization ID. + :key str name: Only returns buckets with a specific name. + :return: Buckets + """ + return self._buckets_service.get_buckets(**kwargs) + + def find_buckets_iter(self, **kwargs): + """Iterate over all buckets with pagination. + + :key str name: Only returns buckets with the specified name + :key str org: The organization name. + :key str org_id: The organization ID. + :key str after: The last resource ID from which to seek from (but not including). 
+ :key int limit: the maximum number of buckets in one page + :return: Buckets iterator + """ + return _Paginated(self._buckets_service.get_buckets, lambda response: response.buckets).find_iter(**kwargs) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api.py new file mode 100644 index 0000000..5f9da47 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api.py @@ -0,0 +1,35 @@ +"""Delete time series data from InfluxDB.""" + +from datetime import datetime +from typing import Union + +from influxdb_client import Organization +from influxdb_client.client._base import _BaseDeleteApi +from influxdb_client.client.util.helpers import get_org_query_param + + +class DeleteApi(_BaseDeleteApi): + """Implementation for '/api/v2/delete' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + super().__init__(influxdb_client) + + def delete(self, start: Union[str, datetime], stop: Union[str, datetime], predicate: str, bucket: str, + org: Union[str, Organization, None] = None) -> None: + """ + Delete Time series data from InfluxDB. + + :param str, datetime.datetime start: start time + :param str, datetime.datetime stop: stop time + :param str predicate: predicate + :param str bucket: bucket id or name from which data will be deleted + :param str, Organization org: specifies the organization to delete data from. + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClient.org`` is used. + :return: + """ + predicate_request = self._prepare_predicate_request(start, stop, predicate) + org_param = get_org_query_param(org=org, client=self._influxdb_client, required_id=False) + + return self._service.post_delete(delete_predicate_request=predicate_request, bucket=bucket, org=org_param) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api_async.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api_async.py new file mode 100644 index 0000000..ef44c84 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/delete_api_async.py @@ -0,0 +1,37 @@ +"""Delete time series data from InfluxDB.""" + +from datetime import datetime +from typing import Union + +from influxdb_client import Organization +from influxdb_client.client._base import _BaseDeleteApi +from influxdb_client.client.util.helpers import get_org_query_param + + +class DeleteApiAsync(_BaseDeleteApi): + """Async implementation for '/api/v2/delete' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + super().__init__(influxdb_client) + + async def delete(self, start: Union[str, datetime], stop: Union[str, datetime], predicate: str, bucket: str, + org: Union[str, Organization, None] = None) -> bool: + """ + Delete Time series data from InfluxDB. + + :param str, datetime.datetime start: start time + :param str, datetime.datetime stop: stop time + :param str predicate: predicate + :param str bucket: bucket id or name from which data will be deleted + :param str, Organization org: specifies the organization to delete data from. + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. 
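A sketch of the synchronous ``DeleteApi`` above; the measurement and tag values in the predicate, like the connection values, are placeholders:

.. code-block:: python

    from datetime import datetime, timezone

    from influxdb_client import InfluxDBClient

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        # delete all matching points from the epoch up to now
        client.delete_api().delete(
            start=datetime(1970, 1, 1, tzinfo=timezone.utc),
            stop=datetime.now(tz=timezone.utc),
            predicate='_measurement="my_measurement" AND location="Prague"',
            bucket="my-bucket")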
+ :return: ``True`` for successfully deleted data, otherwise raise an exception + """ + predicate_request = self._prepare_predicate_request(start, stop, predicate) + org_param = get_org_query_param(org=org, client=self._influxdb_client, required_id=False) + + response = await self._service.post_delete_async(delete_predicate_request=predicate_request, bucket=bucket, + org=org_param, _return_http_data_only=False) + return response[1] == 204 diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/exceptions.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/exceptions.py new file mode 100644 index 0000000..bfa453e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/exceptions.py @@ -0,0 +1,47 @@ +"""Exceptions utils for InfluxDB.""" + +import logging + +from urllib3 import HTTPResponse + +logger = logging.getLogger('influxdb_client.client.exceptions') + + +class InfluxDBError(Exception): + """Raised when a server error occurs.""" + + def __init__(self, response: HTTPResponse = None, message: str = None): + """Initialize the InfluxDBError handler.""" + if response is not None: + self.response = response + self.message = self._get_message(response) + if isinstance(response, HTTPResponse): # response is HTTPResponse + self.headers = response.headers + self.retry_after = response.headers.get('Retry-After') + else: # response is RESTResponse + self.headers = response.getheaders() + self.retry_after = response.getheader('Retry-After') + else: + self.response = None + self.message = message or 'no response' + self.retry_after = None + super().__init__(self.message) + + def _get_message(self, response): + # Body + if response.data: + import json + try: + return json.loads(response.data)["message"] + except Exception as e: + logging.debug(f"Cannot parse error response to JSON: {response.data}, {e}") + return response.data + + # Header + for header_key in ["X-Platform-Error-Code", "X-Influx-Error", "X-InfluxDb-Error"]: + header_value = response.getheader(header_key) + if header_value is not None: + return header_value + + # Http Status + return response.reason diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_csv_parser.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_csv_parser.py new file mode 100644 index 0000000..7a73e3f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_csv_parser.py @@ -0,0 +1,404 @@ +"""Parsing response from InfluxDB to FluxStructures or DataFrame.""" + + +import base64 +import codecs +import csv as csv_parser +import warnings +from enum import Enum +from typing import List + +from influxdb_client.client.flux_table import FluxTable, FluxColumn, FluxRecord, TableList +from influxdb_client.client.util.date_utils import get_date_helper +from influxdb_client.rest import _UTF_8_encoding + +ANNOTATION_DEFAULT = "#default" +ANNOTATION_GROUP = "#group" +ANNOTATION_DATATYPE = "#datatype" +ANNOTATIONS = [ANNOTATION_DEFAULT, ANNOTATION_GROUP, ANNOTATION_DATATYPE] + + +class FluxQueryException(Exception): + """The exception from InfluxDB.""" + + def __init__(self, message, reference) -> None: + """Initialize defaults.""" + self.message = message + self.reference = reference + + +class FluxCsvParserException(Exception): + """The exception for not parsable data.""" + + pass + + +class FluxSerializationMode(Enum): + """The type how we want to serialize data.""" + + tables = 1 + stream = 2 + dataFrame = 3 + + +class FluxResponseMetadataMode(Enum): + """The 
configuration for expected amount of metadata response from InfluxDB.""" + + full = 1 + # useful for Invokable scripts + only_names = 2 + + +class _FluxCsvParserMetadata(object): + def __init__(self): + self.table_index = 0 + self.table_id = -1 + self.start_new_table = False + self.table = None + self.groups = [] + self.parsing_state_error = False + + +class FluxCsvParser(object): + """Parse to processing response from InfluxDB to FluxStructures or DataFrame.""" + + def __init__(self, response, serialization_mode: FluxSerializationMode, + data_frame_index: List[str] = None, query_options=None, + response_metadata_mode: FluxResponseMetadataMode = FluxResponseMetadataMode.full, + use_extension_dtypes=False) -> None: + """ + Initialize defaults. + + :param response: HTTP response from a HTTP client. + Acceptable types: `urllib3.response.HTTPResponse`, `aiohttp.client_reqrep.ClientResponse`. + """ + self._response = response + self.tables = TableList() + self._serialization_mode = serialization_mode + self._response_metadata_mode = response_metadata_mode + self._use_extension_dtypes = use_extension_dtypes + self._data_frame_index = data_frame_index + self._data_frame_values = [] + self._profilers = query_options.profilers if query_options is not None else None + self._profiler_callback = query_options.profiler_callback if query_options is not None else None + self._async_mode = True if 'ClientResponse' in type(response).__name__ else False + + def _close(self): + self._response.close() + + def __enter__(self): + """Initialize CSV reader.""" + # response can be exhausted by logger, so we have to use data that has already been read + if hasattr(self._response, 'closed') and self._response.closed: + from io import StringIO + self._reader = csv_parser.reader(StringIO(self._response.data.decode(_UTF_8_encoding))) + else: + self._reader = csv_parser.reader(codecs.iterdecode(self._response, _UTF_8_encoding)) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Close HTTP response.""" + self._close() + + async def __aenter__(self) -> 'FluxCsvParser': + """Initialize CSV reader.""" + from aiocsv import AsyncReader + self._reader = AsyncReader(_StreamReaderToWithAsyncRead(self._response.content)) + + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + """Shutdown the client.""" + self.__exit__(exc_type, exc_val, exc_tb) + + def generator(self): + """Return Python generator.""" + with self as parser: + for val in parser._parse_flux_response(): + yield val + + def generator_async(self): + """Return Python async-generator.""" + return self._parse_flux_response_async() + + def _parse_flux_response(self): + metadata = _FluxCsvParserMetadata() + + for csv in self._reader: + for val in self._parse_flux_response_row(metadata, csv): + yield val + + # Return latest DataFrame + if (self._serialization_mode is FluxSerializationMode.dataFrame) & hasattr(self, '_data_frame'): + df = self._prepare_data_frame() + if not self._is_profiler_table(metadata.table): + yield df + + async def _parse_flux_response_async(self): + metadata = _FluxCsvParserMetadata() + + try: + async for csv in self._reader: + for val in self._parse_flux_response_row(metadata, csv): + yield val + + # Return latest DataFrame + if (self._serialization_mode is FluxSerializationMode.dataFrame) & hasattr(self, '_data_frame'): + df = self._prepare_data_frame() + if not self._is_profiler_table(metadata.table): + yield df + finally: + self._close() + + def _parse_flux_response_row(self, metadata, csv): + if 
len(csv) < 1: + # Skip empty line in results (new line is used as a delimiter between tables or table and error) + pass + + elif "error" == csv[1] and "reference" == csv[2]: + metadata.parsing_state_error = True + + else: + # Throw InfluxException with error response + if metadata.parsing_state_error: + error = csv[1] + reference_value = csv[2] + raise FluxQueryException(error, reference_value) + + token = csv[0] + # start new table + if (token in ANNOTATIONS and not metadata.start_new_table) or \ + (self._response_metadata_mode is FluxResponseMetadataMode.only_names and not metadata.table): + + # Return already parsed DataFrame + if (self._serialization_mode is FluxSerializationMode.dataFrame) & hasattr(self, '_data_frame'): + df = self._prepare_data_frame() + if not self._is_profiler_table(metadata.table): + yield df + + metadata.start_new_table = True + metadata.table = FluxTable() + self._insert_table(metadata.table, metadata.table_index) + metadata.table_index = metadata.table_index + 1 + metadata.table_id = -1 + elif metadata.table is None: + raise FluxCsvParserException("Unable to parse CSV response. FluxTable definition was not found.") + + # # datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string + if ANNOTATION_DATATYPE == token: + self.add_data_types(metadata.table, csv) + + elif ANNOTATION_GROUP == token: + metadata.groups = csv + + elif ANNOTATION_DEFAULT == token: + self.add_default_empty_values(metadata.table, csv) + + else: + # parse column names + if metadata.start_new_table: + # Invokable scripts doesn't supports dialect => all columns are string + if not metadata.table.columns and \ + self._response_metadata_mode is FluxResponseMetadataMode.only_names: + self.add_data_types(metadata.table, list(map(lambda column: 'string', csv))) + metadata.groups = list(map(lambda column: 'false', csv)) + self.add_groups(metadata.table, metadata.groups) + self.add_column_names_and_tags(metadata.table, csv) + metadata.start_new_table = False + # Create DataFrame with default values + if self._serialization_mode is FluxSerializationMode.dataFrame: + from ..extras import pd + labels = list(map(lambda it: it.label, metadata.table.columns)) + self._data_frame = pd.DataFrame(data=[], columns=labels, index=None) + pass + else: + + # to int conversions todo + current_id = int(csv[2]) + if metadata.table_id == -1: + metadata.table_id = current_id + + if metadata.table_id != current_id: + # create new table with previous column headers settings + flux_columns = metadata.table.columns + metadata.table = FluxTable() + metadata.table.columns.extend(flux_columns) + self._insert_table(metadata.table, metadata.table_index) + metadata.table_index = metadata.table_index + 1 + metadata.table_id = current_id + + flux_record = self.parse_record(metadata.table_index - 1, metadata.table, csv) + + if self._is_profiler_record(flux_record): + self._print_profiler_info(flux_record) + else: + if self._serialization_mode is FluxSerializationMode.tables: + self.tables[metadata.table_index - 1].records.append(flux_record) + + if self._serialization_mode is FluxSerializationMode.stream: + yield flux_record + + if self._serialization_mode is FluxSerializationMode.dataFrame: + self._data_frame_values.append(flux_record.values) + pass + + def _prepare_data_frame(self): + from ..extras import pd + + # We have to create temporary DataFrame because we want to preserve default column values + _temp_df = pd.DataFrame(self._data_frame_values) + self._data_frame_values = [] + + # 
Custom DataFrame index + if self._data_frame_index: + self._data_frame = self._data_frame.set_index(self._data_frame_index) + _temp_df = _temp_df.set_index(self._data_frame_index) + + # Append data + df = pd.concat([self._data_frame.astype(_temp_df.dtypes), _temp_df]) + + if self._use_extension_dtypes: + return df.convert_dtypes() + return df + + def parse_record(self, table_index, table, csv): + """Parse one record.""" + record = FluxRecord(table_index) + + for fluxColumn in table.columns: + column_name = fluxColumn.label + str_val = csv[fluxColumn.index + 1] + record.values[column_name] = self._to_value(str_val, fluxColumn) + record.row.append(record.values[column_name]) + + return record + + def _to_value(self, str_val, column): + + if str_val == '' or str_val is None: + default_value = column.default_value + if default_value == '' or default_value is None: + if self._serialization_mode is FluxSerializationMode.dataFrame: + if self._use_extension_dtypes: + from ..extras import pd + return pd.NA + return None + return None + return self._to_value(default_value, column) + + if "string" == column.data_type: + return str_val + + if "boolean" == column.data_type: + return "true" == str_val + + if "unsignedLong" == column.data_type or "long" == column.data_type: + return int(str_val) + + if "double" == column.data_type: + return float(str_val) + + if "base64Binary" == column.data_type: + return base64.b64decode(str_val) + + if "dateTime:RFC3339" == column.data_type or "dateTime:RFC3339Nano" == column.data_type: + return get_date_helper().parse_date(str_val) + + if "duration" == column.data_type: + # todo better type ? + return int(str_val) + + @staticmethod + def add_data_types(table, data_types): + """Add data types to columns.""" + for index in range(1, len(data_types)): + column_def = FluxColumn(index=index - 1, data_type=data_types[index]) + table.columns.append(column_def) + + @staticmethod + def add_groups(table, csv): + """Add group keys to columns.""" + i = 1 + for column in table.columns: + column.group = csv[i] == "true" + i += 1 + + @staticmethod + def add_default_empty_values(table, default_values): + """Add default values to columns.""" + i = 1 + for column in table.columns: + column.default_value = default_values[i] + i += 1 + + @staticmethod + def add_column_names_and_tags(table, csv): + """Add labels to columns.""" + if len(csv) != len(set(csv)): + message = f"""The response contains columns with duplicated names: '{csv}'. + +You should use the 'record.row' to access your data instead of 'record.values' dictionary. 
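``FluxSerializationMode.stream`` backs the public ``query_stream`` call, which yields ``FluxRecord`` objects as the parser above emits them; a sketch with placeholder connection values:

.. code-block:: python

    from influxdb_client import InfluxDBClient

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        records = client.query_api().query_stream(
            'from(bucket:"my-bucket") |> range(start: -10m)')
        for record in records:
            # rows arrive one at a time, so large results are never
            # materialized as a full TableList
            print(record["_time"], record["_value"])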
+""" + warnings.warn(message, UserWarning) + print(message) + i = 1 + for column in table.columns: + column.label = csv[i] + i += 1 + + def _insert_table(self, table, table_index): + if self._serialization_mode is FluxSerializationMode.tables: + self.tables.insert(table_index, table) + + def _is_profiler_record(self, flux_record: FluxRecord) -> bool: + if not self._profilers: + return False + + for profiler in self._profilers: + if "_measurement" in flux_record.values and flux_record["_measurement"] == "profiler/" + profiler: + return True + + return False + + def _is_profiler_table(self, table: FluxTable) -> bool: + + if not self._profilers: + return False + + return any(filter(lambda column: (column.default_value == "_profiler" and column.label == "result"), + table.columns)) + + def table_list(self) -> TableList: + """Get the list of flux tables.""" + if not self._profilers: + return self.tables + else: + return TableList(filter(lambda table: not self._is_profiler_table(table), self.tables)) + + def _print_profiler_info(self, flux_record: FluxRecord): + if flux_record.get_measurement().startswith("profiler/"): + if self._profiler_callback: + self._profiler_callback(flux_record) + else: + msg = "Profiler: " + flux_record.get_measurement() + print("\n" + len(msg) * "=") + print(msg) + print(len(msg) * "=") + for name in flux_record.values: + val = flux_record[name] + if isinstance(val, str) and len(val) > 50: + print(f"{name:<20}: \n\n{val}") + elif val is not None: + print(f"{name:<20}: {val:<20}") + + +class _StreamReaderToWithAsyncRead: + def __init__(self, response): + self.response = response + self.decoder = codecs.getincrementaldecoder(_UTF_8_encoding)() + + async def read(self, size: int) -> str: + raw_bytes = (await self.response.read(size)) + if not raw_bytes: + return self.decoder.decode(b'', final=True) + return self.decoder.decode(raw_bytes, final=False) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_table.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_table.py new file mode 100644 index 0000000..98a8315 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/flux_table.py @@ -0,0 +1,290 @@ +""" +Flux employs a basic data model built from basic data types. + +The data model consists of tables, records, columns. +""" +import codecs +import csv +from http.client import HTTPResponse +from json import JSONEncoder +from typing import List, Iterator +from influxdb_client.rest import _UTF_8_encoding + + +class FluxStructure: + """The data model consists of tables, records, columns.""" + + pass + + +class FluxStructureEncoder(JSONEncoder): + """The FluxStructure encoder to encode query results to JSON.""" + + def default(self, obj): + """Return serializable objects for JSONEncoder.""" + import datetime + if isinstance(obj, FluxStructure): + return obj.__dict__ + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + return super().default(obj) + + +class FluxTable(FluxStructure): + """ + A table is set of records with a common set of columns and a group key. + + The table can be serialized into JSON by:: + + import json + from influxdb_client.client.flux_table import FluxStructureEncoder + + output = json.dumps(tables, cls=FluxStructureEncoder, indent=2) + print(output) + + """ + + def __init__(self) -> None: + """Initialize defaults.""" + self.columns = [] + self.records = [] + + def get_group_key(self): + """ + Group key is a list of columns. 
+ + A table’s group key denotes which subset of the entire dataset is assigned to the table. + """ + return list(filter(lambda column: (column.group is True), self.columns)) + + def __str__(self): + """Return formatted output.""" + cls_name = type(self).__name__ + return cls_name + "() columns: " + str(len(self.columns)) + ", records: " + str(len(self.records)) + + def __repr__(self): + """Format for inspection.""" + return f"<{type(self).__name__}: {len(self.columns)} columns, {len(self.records)} records>" + + def __iter__(self): + """Iterate over records.""" + return iter(self.records) + + +class FluxColumn(FluxStructure): + """A column has a label and a data type.""" + + def __init__(self, index=None, label=None, data_type=None, group=None, default_value=None) -> None: + """Initialize defaults.""" + self.default_value = default_value + self.group = group + self.data_type = data_type + self.label = label + self.index = index + + def __repr__(self): + """Format for inspection.""" + fields = [repr(self.index)] + [ + f'{name}={getattr(self, name)!r}' for name in ( + 'label', 'data_type', 'group', 'default_value' + ) if getattr(self, name) is not None + ] + return f"{type(self).__name__}({', '.join(fields)})" + + +class FluxRecord(FluxStructure): + """A record is a tuple of named values and is represented using an object type.""" + + def __init__(self, table, values=None) -> None: + """Initialize defaults.""" + if values is None: + values = {} + self.table = table + self.values = values + self.row = [] + + def get_start(self): + """Get '_start' value.""" + return self["_start"] + + def get_stop(self): + """Get '_stop' value.""" + return self["_stop"] + + def get_time(self): + """Get timestamp.""" + return self["_time"] + + def get_value(self): + """Get field value.""" + return self["_value"] + + def get_field(self): + """Get field name.""" + return self["_field"] + + def get_measurement(self): + """Get measurement name.""" + return self["_measurement"] + + def __getitem__(self, key): + """Get value by key.""" + return self.values.__getitem__(key) + + def __setitem__(self, key, value): + """Set value with key and value.""" + return self.values.__setitem__(key, value) + + def __str__(self): + """Return formatted output.""" + cls_name = type(self).__name__ + return cls_name + "() table: " + str(self.table) + ", " + str(self.values) + + def __repr__(self): + """Format for inspection.""" + return f"<{type(self).__name__}: field={self.values.get('_field')}, value={self.values.get('_value')}>" + + +class TableList(List[FluxTable]): + """:class:`~influxdb_client.client.flux_table.FluxTable` list with additionally functional to better handle of query result.""" # noqa: E501 + + def to_values(self, columns: List['str'] = None) -> List[List[object]]: + """ + Serialize query results to a flattened list of values. + + :param columns: if not ``None`` then only specified columns are presented in results + :return: :class:`~list` of values + + Output example: + + .. code-block:: python + + [ + ['New York', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 24.3], + ['Prague', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 25.3], + ... + ] + + Configure required columns: + + .. 
code-block:: python + + from influxdb_client import InfluxDBClient + + with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: + + # Query: using Table structure + tables = client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)') + + # Serialize to values + output = tables.to_values(columns=['location', '_time', '_value']) + print(output) + """ + + def filter_values(record): + if columns is not None: + return [record.values.get(k) for k in columns] + return record.values.values() + + return self._to_values(filter_values) + + def to_json(self, columns: List['str'] = None, **kwargs) -> str: + """ + Serialize query results to a JSON formatted :class:`~str`. + + :param columns: if not ``None`` then only specified columns are presented in results + :return: :class:`~str` + + The query results is flattened to array: + + .. code-block:: javascript + + [ + { + "_measurement": "mem", + "_start": "2021-06-23T06:50:11.897825+00:00", + "_stop": "2021-06-25T06:50:11.897825+00:00", + "_time": "2020-02-27T16:20:00.897825+00:00", + "region": "north", + "_field": "usage", + "_value": 15 + }, + { + "_measurement": "mem", + "_start": "2021-06-23T06:50:11.897825+00:00", + "_stop": "2021-06-25T06:50:11.897825+00:00", + "_time": "2020-02-27T16:20:01.897825+00:00", + "region": "west", + "_field": "usage", + "_value": 10 + }, + ... + ] + + The JSON format could be configured via ``**kwargs`` arguments: + + .. code-block:: python + + from influxdb_client import InfluxDBClient + + with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: + + # Query: using Table structure + tables = client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)') + + # Serialize to JSON + output = tables.to_json(indent=5) + print(output) + + For all available options see - `json.dump `_. + """ + if 'indent' not in kwargs: + kwargs['indent'] = 2 + + def filter_values(record): + if columns is not None: + return {k: v for (k, v) in record.values.items() if k in columns} + return record.values + + import json + return json.dumps(self._to_values(filter_values), cls=FluxStructureEncoder, **kwargs) + + def _to_values(self, mapping): + return [mapping(record) for table in self for record in table.records] + + +class CSVIterator(Iterator[List[str]]): + """:class:`Iterator[List[str]]` with additionally functional to better handle of query result.""" + + def __init__(self, response: HTTPResponse) -> None: + """Initialize ``csv.reader``.""" + self.delegate = csv.reader(codecs.iterdecode(response, _UTF_8_encoding)) + + def __iter__(self): + """Return an iterator object.""" + return self + + def __next__(self): + """Retrieve the next item from the iterator.""" + row = self.delegate.__next__() + while not row: + row = self.delegate.__next__() + return row + + def to_values(self) -> List[List[str]]: + """ + Serialize query results to a flattened list of values. + + :return: :class:`~list` of values + + Output example: + + .. code-block:: python + + [ + ['New York', '2022-06-14T08:00:51.749072045Z', '24.3'], + ['Prague', '2022-06-14T08:00:51.749072045Z', '25.3'], + ... 
+ ]
+ """
+ return list(self.__iter__())
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client.py
new file mode 100644
index 0000000..6079aac
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client.py
@@ -0,0 +1,438 @@
+"""InfluxDBClient is a client for the API defined in https://github.com/influxdata/influxdb/blob/master/http/swagger.yml."""
+
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+from influxdb_client import HealthCheck, HealthService, Ready, ReadyService, PingService, \
+ InvokableScriptsApi
+from influxdb_client.client._base import _BaseClient
+from influxdb_client.client.authorizations_api import AuthorizationsApi
+from influxdb_client.client.bucket_api import BucketsApi
+from influxdb_client.client.delete_api import DeleteApi
+from influxdb_client.client.labels_api import LabelsApi
+from influxdb_client.client.organizations_api import OrganizationsApi
+from influxdb_client.client.query_api import QueryApi, QueryOptions
+from influxdb_client.client.tasks_api import TasksApi
+from influxdb_client.client.users_api import UsersApi
+from influxdb_client.client.write_api import WriteApi, WriteOptions, PointSettings
+
+logger = logging.getLogger('influxdb_client.client.influxdb_client')
+
+
+class InfluxDBClient(_BaseClient):
+ """InfluxDBClient is a client for InfluxDB v2."""
+
+ def __init__(self, url, token: str = None, debug=None, timeout=10_000, enable_gzip=False, org: str = None,
+ default_tags: dict = None, **kwargs) -> None:
+ """
+ Initialize defaults.
+
+ :param url: InfluxDB server API url (ex. http://localhost:8086).
+ :param token: ``token`` to authenticate to the InfluxDB API
+ :param debug: enable verbose logging of http requests
+ :param timeout: HTTP client timeout setting for a request specified in milliseconds.
+ If a single number is provided, it is used as the total request timeout.
+ It can also be a pair (tuple) of (connection, read) timeouts.
+ :param enable_gzip: Enable Gzip compression for http requests. Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :param org: organization name (used as a default in Query, Write and Delete API)
+ :key bool verify_ssl: Set this to false to skip verifying the SSL certificate when calling the API from an https server.
+ :key str ssl_ca_cert: Set this to customize the certificate file used to verify the peer.
+ :key str cert_file: Path to the certificate that will be used for mTLS authentication.
+ :key str cert_key_file: Path to the file that contains the private key for the mTLS certificate.
+ :key str cert_key_password: String or function which returns the password for decrypting the mTLS private key.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
+ :key str proxy: Set this to configure the http proxy to be used (ex. http://localhost:3128)
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key int connection_pool_maxsize: Number of connections to save that can be reused by urllib3.
+ Defaults to "multiprocessing.cpu_count() * 5".
+ :key urllib3.util.retry.Retry retries: Set the default retry strategy that is used for all HTTP requests
+ except batching writes. By default, there is no retry strategy.
+ :key bool auth_basic: Set this to true to enable basic authentication when talking to an InfluxDB 1.8.x instance
+ that does not have authentication enabled but is protected by a reverse proxy with
+ basic authentication. (defaults to false; don't set to true when talking to InfluxDB 2)
+ :key str username: ``username`` to authenticate via username and password credentials to the InfluxDB 2.x
+ :key str password: ``password`` to authenticate via username and password credentials to the InfluxDB 2.x
+ :key list[str] profilers: list of enabled Flux profilers
+ """
+ super().__init__(url=url, token=token, debug=debug, timeout=timeout, enable_gzip=enable_gzip, org=org,
+ default_tags=default_tags, http_client_logger="urllib3", **kwargs)
+
+ from .._sync.api_client import ApiClient
+ self.api_client = ApiClient(configuration=self.conf, header_name=self.auth_header_name,
+ header_value=self.auth_header_value, retries=self.retries)
+
+ def __enter__(self):
+ """
+ Enter the runtime context related to this object.
+
+ It will bind this method’s return value to the target(s)
+ specified in the `as` clause of the statement.
+
+ :return: self instance
+ """
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Exit the runtime context related to this object and close the client."""
+ self.close()
+
+ @classmethod
+ def from_config_file(cls, config_file: str = "config.ini", debug=None, enable_gzip=False, **kwargs):
+ """
+ Configure the client via a configuration file. The configuration has to be under the 'influx' section.
+
+ :param config_file: Path to the configuration file
+ :param debug: Enable verbose logging of http requests
+ :param enable_gzip: Enable Gzip compression for http requests. Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :key config_name: Name of the configuration section of the configuration file
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key urllib3.util.retry.Retry retries: Set the default retry strategy that is used for all HTTP requests
+ except batching writes. By default, there is no retry strategy.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
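+
+ A minimal usage sketch (assuming a ``config.ini`` like the example below sits next to the
+ script; ``client.org`` is assumed to hold the configured organization):
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient.from_config_file("config.ini") as client:
+ print(client.org)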
+
+ The supported formats:
+ - https://docs.python.org/3/library/configparser.html
+ - https://toml.io/en/
+ - https://www.json.org/json-en.html
+
+ Configuration options:
+ - url
+ - org
+ - token
+ - timeout
+ - verify_ssl
+ - ssl_ca_cert
+ - cert_file
+ - cert_key_file
+ - cert_key_password
+ - connection_pool_maxsize
+ - auth_basic
+ - profilers
+ - proxy
+
+
+ config.ini example::
+
+ [influx2]
+ url=http://localhost:8086
+ org=my-org
+ token=my-token
+ timeout=6000
+ connection_pool_maxsize=25
+ auth_basic=false
+ profilers=query,operator
+ proxy=http://proxy.domain.org:8080
+
+ [tags]
+ id = 132-987-655
+ customer = California Miner
+ data_center = ${env.data_center}
+
+ config.toml example::
+
+ [influx2]
+ url = "http://localhost:8086"
+ token = "my-token"
+ org = "my-org"
+ timeout = 6000
+ connection_pool_maxsize = 25
+ auth_basic = false
+ profilers="query, operator"
+ proxy = "http://proxy.domain.org:8080"
+
+ [tags]
+ id = "132-987-655"
+ customer = "California Miner"
+ data_center = "${env.data_center}"
+
+ config.json example::
+
+ {
+ "url": "http://localhost:8086",
+ "token": "my-token",
+ "org": "my-org",
+ "active": true,
+ "timeout": 6000,
+ "connection_pool_maxsize": 55,
+ "auth_basic": false,
+ "profilers": "query, operator",
+ "tags": {
+ "id": "132-987-655",
+ "customer": "California Miner",
+ "data_center": "${env.data_center}"
+ }
+ }
+
+ """
+ return InfluxDBClient._from_config_file(config_file=config_file, debug=debug, enable_gzip=enable_gzip, **kwargs)
+
+ @classmethod
+ def from_env_properties(cls, debug=None, enable_gzip=False, **kwargs):
+ """
+ Configure the client via environment properties.
+
+ :param debug: Enable verbose logging of http requests
+ :param enable_gzip: Enable Gzip compression for http requests. Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :key str proxy: Set this to configure the http proxy to be used (ex. http://localhost:3128)
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key urllib3.util.retry.Retry retries: Set the default retry strategy that is used for all HTTP requests
+ except batching writes. By default, there is no retry strategy.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
+
+ Supported environment properties:
+ - INFLUXDB_V2_URL
+ - INFLUXDB_V2_ORG
+ - INFLUXDB_V2_TOKEN
+ - INFLUXDB_V2_TIMEOUT
+ - INFLUXDB_V2_VERIFY_SSL
+ - INFLUXDB_V2_SSL_CA_CERT
+ - INFLUXDB_V2_CERT_FILE
+ - INFLUXDB_V2_CERT_KEY_FILE
+ - INFLUXDB_V2_CERT_KEY_PASSWORD
+ - INFLUXDB_V2_CONNECTION_POOL_MAXSIZE
+ - INFLUXDB_V2_AUTH_BASIC
+ - INFLUXDB_V2_PROFILERS
+ - INFLUXDB_V2_TAG
+ """
+ return InfluxDBClient._from_env_properties(debug=debug, enable_gzip=enable_gzip, **kwargs)
+
+ def write_api(self, write_options=WriteOptions(), point_settings=PointSettings(), **kwargs) -> WriteApi:
+ """
+ Create a Write API instance.
+
+ Example:
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+ from influxdb_client.client.write_api import SYNCHRONOUS
+
+
+ # Initialize SYNCHRONOUS instance of WriteApi
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ write_api = client.write_api(write_options=SYNCHRONOUS)
+
+ If you would like to use **background batching**, configure the client like this:
+
+ ..
code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ # Initialize background batching instance of WriteApi
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ with client.write_api() as write_api:
+ pass
+
+ You can also use callbacks to be notified about the state of background batches:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+ from influxdb_client.client.exceptions import InfluxDBError
+
+
+ class BatchingCallback(object):
+
+ def success(self, conf: (str, str, str), data: str):
+ print(f"Written batch: {conf}, data: {data}")
+
+ def error(self, conf: (str, str, str), data: str, exception: InfluxDBError):
+ print(f"Cannot write batch: {conf}, data: {data} due: {exception}")
+
+ def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError):
+ print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}")
+
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ callback = BatchingCallback()
+ with client.write_api(success_callback=callback.success,
+ error_callback=callback.error,
+ retry_callback=callback.retry) as write_api:
+ pass
+
+ :param write_options: Write API configuration
+ :param point_settings: settings to store default tags
+ :key success_callback: The callable ``callback`` to run after a batch has been successfully written.
+
+ The callable must accept two arguments:
+ - `Tuple`: ``(bucket, organization, precision)``
+ - `str`: written data
+
+ **[batching mode]**
+
+ :key error_callback: The callable ``callback`` to run after a batch could not be written.
+
+ The callable must accept three arguments:
+ - `Tuple`: ``(bucket, organization, precision)``
+ - `str`: written data
+ - `Exception`: the error that occurred
+
+ **[batching mode]**
+ :key retry_callback: The callable ``callback`` to run after a retryable error occurred.
+
+ The callable must accept three arguments:
+ - `Tuple`: ``(bucket, organization, precision)``
+ - `str`: written data
+ - `Exception`: the retryable error that occurred
+
+ **[batching mode]**
+ :return: write api instance
+ """
+ return WriteApi(influxdb_client=self, write_options=write_options, point_settings=point_settings, **kwargs)
+
+ def query_api(self, query_options: QueryOptions = QueryOptions()) -> QueryApi:
+ """
+ Create a Query API instance.
+
+ :param query_options: optional query api configuration
+ :return: Query api instance
+ """
+ return QueryApi(self, query_options)
+
+ def invokable_scripts_api(self) -> InvokableScriptsApi:
+ """
+ Create an InvokableScripts API instance.
+
+ :return: InvokableScripts API instance
+ """
+ return InvokableScriptsApi(self)
+
+ def close(self):
+ """Shutdown the client."""
+ self.__del__()
+
+ def __del__(self):
+ """Shutdown the client."""
+ if self.api_client:
+ self.api_client.__del__()
+ self.api_client = None
+
+ def buckets_api(self) -> BucketsApi:
+ """
+ Create the Bucket API instance.
+
+ :return: buckets api
+ """
+ return BucketsApi(self)
+
+ def authorizations_api(self) -> AuthorizationsApi:
+ """
+ Create the Authorizations API instance.
+
+ :return: authorizations api
+ """
+ return AuthorizationsApi(self)
+
+ def users_api(self) -> UsersApi:
+ """
+ Create the Users API instance.
+
+ :return: users api
+ """
+ return UsersApi(self)
+
+ def organizations_api(self) -> OrganizationsApi:
+ """
+ Create the Organizations API instance.
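+
+ A minimal usage sketch (assuming a running InfluxDB at ``http://localhost:8086``) of listing
+ organizations via ``find_organizations``:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ for org in client.organizations_api().find_organizations():
+ print(org.name)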
+
+ :return: organizations api
+ """
+ return OrganizationsApi(self)
+
+ def tasks_api(self) -> TasksApi:
+ """
+ Create the Tasks API instance.
+
+ :return: tasks api
+ """
+ return TasksApi(self)
+
+ def labels_api(self) -> LabelsApi:
+ """
+ Create the Labels API instance.
+
+ :return: labels api
+ """
+ return LabelsApi(self)
+
+ def health(self) -> HealthCheck:
+ """
+ Get the health of an instance.
+
+ :return: HealthCheck
+ """
+ warnings.warn("This method is deprecated. Call 'ping()' instead.", DeprecationWarning)
+ health_service = HealthService(self.api_client)
+
+ try:
+ health = health_service.get_health()
+ return health
+ except Exception as e:
+ return HealthCheck(name="influxdb", message=str(e), status="fail")
+
+ def ping(self) -> bool:
+ """
+ Return the status of the InfluxDB instance.
+
+ :return: The status of InfluxDB.
+ """
+ ping_service = PingService(self.api_client)
+
+ try:
+ ping_service.get_ping()
+ return True
+ except Exception as ex:
+ logger.debug("Unexpected error during /ping: %s", ex)
+ return False
+
+ def version(self) -> str:
+ """
+ Return the version of the connected InfluxDB Server.
+
+ :return: The version of InfluxDB.
+ """
+ ping_service = PingService(self.api_client)
+
+ response = ping_service.get_ping_with_http_info(_return_http_data_only=False)
+
+ return ping_service.response_header(response)
+
+ def build(self) -> str:
+ """
+ Return the build type of the connected InfluxDB Server.
+
+ :return: The type of InfluxDB build.
+ """
+ ping_service = PingService(self.api_client)
+
+ return ping_service.build_type()
+
+ def ready(self) -> Ready:
+ """
+ Get the readiness of the InfluxDB 2.0 instance.
+
+ :return: Ready
+ """
+ ready_service = ReadyService(self.api_client)
+ return ready_service.get_ready()
+
+ def delete_api(self) -> DeleteApi:
+ """
+ Get the delete metrics API instance.
+
+ :return: delete api
+ """
+ return DeleteApi(self)
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client_async.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client_async.py
new file mode 100644
index 0000000..a8a2d17
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/influxdb_client_async.py
@@ -0,0 +1,301 @@
+"""InfluxDBClientAsync is a client for the API defined in https://github.com/influxdata/openapi/blob/master/contracts/oss.yml.""" # noqa: E501
+import logging
+import sys
+
+from influxdb_client import PingService
+from influxdb_client.client._base import _BaseClient
+from influxdb_client.client.delete_api_async import DeleteApiAsync
+from influxdb_client.client.query_api import QueryOptions
+from influxdb_client.client.query_api_async import QueryApiAsync
+from influxdb_client.client.write_api import PointSettings
+from influxdb_client.client.write_api_async import WriteApiAsync
+
+logger = logging.getLogger('influxdb_client.client.influxdb_client_async')
+
+
+class InfluxDBClientAsync(_BaseClient):
+ """InfluxDBClientAsync is a client for InfluxDB v2."""
+
+ def __init__(self, url, token: str = None, org: str = None, debug=None, timeout=10_000, enable_gzip=False,
+ **kwargs) -> None:
+ """
+ Initialize defaults.
+
+ :param url: InfluxDB server API url (ex. http://localhost:8086).
+ :param token: ``token`` to authenticate to the InfluxDB 2.x
+ :param org: organization name (used as a default in Query, Write and Delete API)
+ :param debug: enable verbose logging of http requests
+ :param timeout: The maximal number of milliseconds for the whole HTTP request including
+ connection establishment, request sending and response reading.
+ It can also be a :class:`~aiohttp.ClientTimeout` which is passed directly to ``aiohttp``.
+ :param enable_gzip: Enable Gzip compression for http requests. Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :key bool verify_ssl: Set this to false to skip verifying the SSL certificate when calling the API from an https server.
+ :key str ssl_ca_cert: Set this to customize the certificate file used to verify the peer.
+ :key str cert_file: Path to the certificate that will be used for mTLS authentication.
+ :key str cert_key_file: Path to the file that contains the private key for the mTLS certificate.
+ :key str cert_key_password: String or function which returns the password for decrypting the mTLS private key.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
+ :key str proxy: Set this to configure the http proxy to be used (ex. http://localhost:3128)
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key int connection_pool_maxsize: The total number of simultaneous connections.
+ Defaults to "multiprocessing.cpu_count() * 5".
+ :key bool auth_basic: Set this to true to enable basic authentication when talking to an InfluxDB 1.8.x instance
+ that does not have authentication enabled but is protected by a reverse proxy with
+ basic authentication. (defaults to false; don't set to true when talking to InfluxDB 2)
+ :key str username: ``username`` to authenticate via username and password credentials to the InfluxDB 2.x
+ :key str password: ``password`` to authenticate via username and password credentials to the InfluxDB 2.x
+ :key bool allow_redirects: If set to ``False``, do not follow HTTP redirects. ``True`` by default.
+ :key int max_redirects: Maximum number of HTTP redirects to follow. ``10`` by default.
+ :key dict client_session_kwargs: Additional configuration arguments for :class:`~aiohttp.ClientSession`
+ :key type client_session_type: Type of aiohttp client to use. Useful for third party wrappers like
+ ``aiohttp-retry``. :class:`~aiohttp.ClientSession` by default.
+ :key list[str] profilers: list of enabled Flux profilers
+ """
+ super().__init__(url=url, token=token, org=org, debug=debug, timeout=timeout, enable_gzip=enable_gzip,
+ http_client_logger="aiohttp.client", **kwargs)
+
+ # compatibility with Python 3.6
+ if sys.version_info[:2] >= (3, 7):
+ from asyncio import get_running_loop
+ else:
+ from asyncio import _get_running_loop as get_running_loop
+
+ # check for a running asynchronous context
+ try:
+ loop = get_running_loop()
+ # compatibility with Python 3.6
+ if loop is None:
+ raise RuntimeError('no running event loop')
+ except RuntimeError:
+ from influxdb_client.client.exceptions import InfluxDBError
+ message = "The async client should be initialised inside an async coroutine, " \
+ "otherwise there can be unexpected behaviour."
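+ # A minimal sketch (assuming a running InfluxDB at http://localhost:8086) of the
+ # expected initialisation inside a coroutine:
+ #
+ #     import asyncio
+ #
+ #     async def main():
+ #         async with InfluxDBClientAsync(url="http://localhost:8086",
+ #                                        token="my-token", org="my-org") as client:
+ #             print(await client.ping())
+ #
+ #     asyncio.run(main())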
+ raise InfluxDBError(response=None, message=message)
+
+ from .._async.api_client import ApiClientAsync
+ self.api_client = ApiClientAsync(configuration=self.conf, header_name=self.auth_header_name,
+ header_value=self.auth_header_value, **kwargs)
+
+ async def __aenter__(self) -> 'InfluxDBClientAsync':
+ """
+ Enter the runtime context related to this object.
+
+ :return: self instance
+ """
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb) -> None:
+ """Shutdown the client."""
+ await self.close()
+
+ async def close(self):
+ """Shutdown the client."""
+ if self.api_client:
+ await self.api_client.close()
+ self.api_client = None
+
+ @classmethod
+ def from_config_file(cls, config_file: str = "config.ini", debug=None, enable_gzip=False, **kwargs):
+ """
+ Configure the client via a configuration file. The configuration has to be under the 'influx' section.
+
+ :param config_file: Path to the configuration file
+ :param debug: Enable verbose logging of http requests
+ :param enable_gzip: Enable Gzip compression for http requests. Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :key config_name: Name of the configuration section of the configuration file
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key urllib3.util.retry.Retry retries: Set the default retry strategy that is used for all HTTP requests
+ except batching writes. By default, there is no retry strategy.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
+
+ The supported formats:
+ - https://docs.python.org/3/library/configparser.html
+ - https://toml.io/en/
+ - https://www.json.org/json-en.html
+
+ Configuration options:
+ - url
+ - org
+ - token
+ - timeout
+ - verify_ssl
+ - ssl_ca_cert
+ - cert_file
+ - cert_key_file
+ - cert_key_password
+ - connection_pool_maxsize
+ - auth_basic
+ - profilers
+ - proxy
+
+
+ config.ini example::
+
+ [influx2]
+ url=http://localhost:8086
+ org=my-org
+ token=my-token
+ timeout=6000
+ connection_pool_maxsize=25
+ auth_basic=false
+ profilers=query,operator
+ proxy=http://proxy.domain.org:8080
+
+ [tags]
+ id = 132-987-655
+ customer = California Miner
+ data_center = ${env.data_center}
+
+ config.toml example::
+
+ [influx2]
+ url = "http://localhost:8086"
+ token = "my-token"
+ org = "my-org"
+ timeout = 6000
+ connection_pool_maxsize = 25
+ auth_basic = false
+ profilers="query, operator"
+ proxy = "http://proxy.domain.org:8080"
+
+ [tags]
+ id = "132-987-655"
+ customer = "California Miner"
+ data_center = "${env.data_center}"
+
+ config.json example::
+
+ {
+ "url": "http://localhost:8086",
+ "token": "my-token",
+ "org": "my-org",
+ "active": true,
+ "timeout": 6000,
+ "connection_pool_maxsize": 55,
+ "auth_basic": false,
+ "profilers": "query, operator",
+ "tags": {
+ "id": "132-987-655",
+ "customer": "California Miner",
+ "data_center": "${env.data_center}"
+ }
+ }
+
+ """
+ return InfluxDBClientAsync._from_config_file(config_file=config_file, debug=debug,
+ enable_gzip=enable_gzip, **kwargs)
+
+ @classmethod
+ def from_env_properties(cls, debug=None, enable_gzip=False, **kwargs):
+ """
+ Configure the client via environment properties.
+
+ :param debug: Enable verbose logging of http requests
+ :param enable_gzip: Enable Gzip compression for http requests.
Currently, only the "Write" and "Query" endpoints
+ support Gzip compression.
+ :key str proxy: Set this to configure the http proxy to be used (ex. http://localhost:3128)
+ :key str proxy_headers: A dictionary containing headers that will be sent to the proxy. Can be used for proxy
+ authentication.
+ :key urllib3.util.retry.Retry retries: Set the default retry strategy that is used for all HTTP requests
+ except batching writes. By default, there is no retry strategy.
+ :key ssl.SSLContext ssl_context: Specify a custom Python SSL Context for the TLS/ mTLS handshake.
+ Note that either certificate/key files or an SSL Context can be
+ supplied, but not both.
+
+
+ Supported environment properties:
+ - INFLUXDB_V2_URL
+ - INFLUXDB_V2_ORG
+ - INFLUXDB_V2_TOKEN
+ - INFLUXDB_V2_TIMEOUT
+ - INFLUXDB_V2_VERIFY_SSL
+ - INFLUXDB_V2_SSL_CA_CERT
+ - INFLUXDB_V2_CERT_FILE
+ - INFLUXDB_V2_CERT_KEY_FILE
+ - INFLUXDB_V2_CERT_KEY_PASSWORD
+ - INFLUXDB_V2_CONNECTION_POOL_MAXSIZE
+ - INFLUXDB_V2_AUTH_BASIC
+ - INFLUXDB_V2_PROFILERS
+ - INFLUXDB_V2_TAG
+ """
+ return InfluxDBClientAsync._from_env_properties(debug=debug, enable_gzip=enable_gzip, **kwargs)
+
+ async def ping(self) -> bool:
+ """
+ Return the status of the InfluxDB instance.
+
+ :return: The status of InfluxDB.
+ """
+ ping_service = PingService(self.api_client)
+
+ try:
+ await ping_service.get_ping_async()
+ return True
+ except Exception as ex:
+ logger.debug("Unexpected error during /ping: %s", ex)
+ raise ex
+
+ async def version(self) -> str:
+ """
+ Return the version of the connected InfluxDB Server.
+
+ :return: The version of InfluxDB.
+ """
+ ping_service = PingService(self.api_client)
+
+ response = await ping_service.get_ping_async(_return_http_data_only=False)
+ return ping_service.response_header(response)
+
+ async def build(self) -> str:
+ """
+ Return the build type of the connected InfluxDB Server.
+
+ :return: The type of InfluxDB build.
+ """
+ ping_service = PingService(self.api_client)
+
+ return await ping_service.build_type_async()
+
+ def query_api(self, query_options: QueryOptions = QueryOptions()) -> QueryApiAsync:
+ """
+ Create an asynchronous Query API instance.
+
+ :param query_options: optional query api configuration
+ :return: Query api instance
+ """
+ return QueryApiAsync(self, query_options)
+
+ def write_api(self, point_settings=PointSettings()) -> WriteApiAsync:
+ """
+ Create an asynchronous Write API instance.
+
+ Example:
+ .. code-block:: python
+
+ from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+
+
+ # Initialize async/await instance of Write API
+ async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ write_api = client.write_api()
+
+ :param point_settings: settings to store default tags
+ :return: write api instance
+ """
+ return WriteApiAsync(influxdb_client=self, point_settings=point_settings)
+
+ def delete_api(self) -> DeleteApiAsync:
+ """
+ Get the asynchronous delete metrics API instance.
+
+ :return: delete api
+ """
+ return DeleteApiAsync(self)
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/invokable_scripts_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/invokable_scripts_api.py
new file mode 100644
index 0000000..cf5df28
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/invokable_scripts_api.py
@@ -0,0 +1,293 @@
+"""
+Use API invokable scripts to create custom InfluxDB API endpoints that query, process, and shape data.
+
+API invokable scripts let you assign scripts to API endpoints and then execute them as standard REST operations
+in InfluxDB Cloud.
+"""
+
+from typing import List, Iterator, Generator, Any
+
+from influxdb_client import Script, InvokableScriptsService, ScriptCreateRequest, ScriptUpdateRequest, \
+ ScriptInvocationParams
+from influxdb_client.client._base import _BaseQueryApi
+from influxdb_client.client.flux_csv_parser import FluxResponseMetadataMode
+from influxdb_client.client.flux_table import FluxRecord, TableList, CSVIterator
+
+
+class InvokableScriptsApi(_BaseQueryApi):
+ """Use API invokable scripts to create custom InfluxDB API endpoints that query, process, and shape data."""
+
+ def __init__(self, influxdb_client):
+ """Initialize defaults."""
+ self._influxdb_client = influxdb_client
+ self._invokable_scripts_service = InvokableScriptsService(influxdb_client.api_client)
+
+ def create_script(self, create_request: ScriptCreateRequest) -> Script:
+ """Create a script.
+
+ :param ScriptCreateRequest create_request: The script to create. (required)
+ :return: The created script.
+ """
+ return self._invokable_scripts_service.post_scripts(script_create_request=create_request)
+
+ def update_script(self, script_id: str, update_request: ScriptUpdateRequest) -> Script:
+ """Update a script.
+
+ :param str script_id: The ID of the script to update. (required)
+ :param ScriptUpdateRequest update_request: Script updates to apply (required)
+ :return: The updated script.
+ """
+ return self._invokable_scripts_service.patch_scripts_id(script_id=script_id,
+ script_update_request=update_request)
+
+ def delete_script(self, script_id: str) -> None:
+ """Delete a script.
+
+ :param str script_id: The ID of the script to delete. (required)
+ :return: None
+ """
+ self._invokable_scripts_service.delete_scripts_id(script_id=script_id)
+
+ def find_scripts(self, **kwargs):
+ """List scripts.
+
+ :key int limit: The number of scripts to return.
+ :key int offset: The offset for pagination.
+ :return: List of scripts.
+ :rtype: list[Script]
+ """
+ return self._invokable_scripts_service.get_scripts(**kwargs).scripts
+
+ def invoke_script(self, script_id: str, params: dict = None) -> TableList:
+ """
+ Synchronously invoke a script and return the result as a TableList.
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param params: bind parameters
+ :return: :class:`~influxdb_client.client.flux_table.FluxTable` list wrapped into
+ :class:`~influxdb_client.client.flux_table.TableList`
+ :rtype: TableList
+
+ Serialize the query results to a flattened list of values via :func:`~influxdb_client.client.flux_table.TableList.to_values`:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="https://us-west-2-1.aws.cloud2.influxdata.com", token="my-token", org="my-org") as client:
+
+ # Query: using Table structure
+ tables = client.invokable_scripts_api().invoke_script(script_id="script-id")
+
+ # Serialize to values
+ output = tables.to_values(columns=['location', '_time', '_value'])
+ print(output)
+
+ .. code-block:: python
+
+ [
+ ['New York', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 24.3],
+ ['Prague', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 25.3],
+ ...
+ ]
+
+ Serialize the query results to JSON via :func:`~influxdb_client.client.flux_table.TableList.to_json`:
+
+ ..
code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="https://us-west-2-1.aws.cloud2.influxdata.com", token="my-token", org="my-org") as client:
+
+ # Query: using Table structure
+ tables = client.invokable_scripts_api().invoke_script(script_id="script-id")
+
+ # Serialize to JSON
+ output = tables.to_json(indent=5)
+ print(output)
+
+ .. code-block:: javascript
+
+ [
+ {
+ "_measurement": "mem",
+ "_start": "2021-06-23T06:50:11.897825+00:00",
+ "_stop": "2021-06-25T06:50:11.897825+00:00",
+ "_time": "2020-02-27T16:20:00.897825+00:00",
+ "region": "north",
+ "_field": "usage",
+ "_value": 15
+ },
+ {
+ "_measurement": "mem",
+ "_start": "2021-06-23T06:50:11.897825+00:00",
+ "_stop": "2021-06-25T06:50:11.897825+00:00",
+ "_time": "2020-02-27T16:20:01.897825+00:00",
+ "region": "west",
+ "_field": "usage",
+ "_value": 10
+ },
+ ...
+ ]
+ """ # noqa: E501
+ response = self._invokable_scripts_service \
+ .post_scripts_id_invoke(script_id=script_id,
+ script_invocation_params=ScriptInvocationParams(params=params),
+ async_req=False,
+ _preload_content=False,
+ _return_http_data_only=False)
+ return self._to_tables(response, query_options=None, response_metadata_mode=FluxResponseMetadataMode.only_names)
+
+ def invoke_script_stream(self, script_id: str, params: dict = None) -> Generator['FluxRecord', Any, None]:
+ """
+ Synchronously invoke a script and return the result as a Generator['FluxRecord'].
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param params: bind parameters
+ :return: Stream of FluxRecord.
+ :rtype: Generator['FluxRecord']
+ """
+ response = self._invokable_scripts_service \
+ .post_scripts_id_invoke(script_id=script_id,
+ script_invocation_params=ScriptInvocationParams(params=params),
+ async_req=False,
+ _preload_content=False,
+ _return_http_data_only=False)
+
+ return self._to_flux_record_stream(response, query_options=None,
+ response_metadata_mode=FluxResponseMetadataMode.only_names)
+
+ def invoke_script_data_frame(self, script_id: str, params: dict = None, data_frame_index: List[str] = None):
+ """
+ Synchronously invoke a script and return a Pandas DataFrame.
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ .. note:: If the ``script`` returns tables with differing schemas then the client generates a :class:`~DataFrame` for each of them.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param List[str] data_frame_index: The list of columns that are used as DataFrame index.
+ :param params: bind parameters
+ :return: :class:`~DataFrame` or :class:`~List[DataFrame]`
+
+ .. warning:: For optimal processing of the query results use the ``pivot()`` function, which aligns results as a table.
+
+ ..
code-block:: text
+
+ from(bucket:"my-bucket")
+ |> range(start: -5m, stop: now())
+ |> filter(fn: (r) => r._measurement == "mem")
+ |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+
+ For more info see:
+ - https://docs.influxdata.com/resources/videos/pivots-in-flux/
+ - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/
+ - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/
+ """ # noqa: E501
+ _generator = self.invoke_script_data_frame_stream(script_id=script_id,
+ params=params,
+ data_frame_index=data_frame_index)
+ return self._to_data_frames(_generator)
+
+ def invoke_script_data_frame_stream(self, script_id: str, params: dict = None, data_frame_index: List[str] = None):
+ """
+ Synchronously invoke a script and return a stream of Pandas DataFrames as a Generator['pd.DataFrame'].
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ .. note:: If the ``script`` returns tables with differing schemas then the client generates a :class:`~DataFrame` for each of them.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param List[str] data_frame_index: The list of columns that are used as DataFrame index.
+ :param params: bind parameters
+ :return: :class:`~Generator[DataFrame]`
+
+ .. warning:: For optimal processing of the query results use the ``pivot()`` function, which aligns results as a table.
+
+ .. code-block:: text
+
+ from(bucket:"my-bucket")
+ |> range(start: -5m, stop: now())
+ |> filter(fn: (r) => r._measurement == "mem")
+ |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+
+ For more info see:
+ - https://docs.influxdata.com/resources/videos/pivots-in-flux/
+ - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/
+ - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/
+ """ # noqa: E501
+ response = self._invokable_scripts_service \
+ .post_scripts_id_invoke(script_id=script_id,
+ script_invocation_params=ScriptInvocationParams(params=params),
+ async_req=False,
+ _preload_content=False,
+ _return_http_data_only=False)
+
+ return self._to_data_frame_stream(data_frame_index, response, query_options=None,
+ response_metadata_mode=FluxResponseMetadataMode.only_names)
+
+ def invoke_script_csv(self, script_id: str, params: dict = None) -> CSVIterator:
+ """
+ Synchronously invoke a script and return the result as a CSV iterator. Each iteration returns a row of the CSV file.
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param params: bind parameters
+ :return: :class:`~Iterator[List[str]]` wrapped into :class:`~influxdb_client.client.flux_table.CSVIterator`
+ :rtype: CSVIterator
+
+ Serialize the query results to a flattened list of values via :func:`~influxdb_client.client.flux_table.CSVIterator.to_values`:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+
+ # Query: using CSV iterator
+ csv_iterator = client.invokable_scripts_api().invoke_script_csv(script_id="script-id")
+
+ # Serialize to values
+ output = csv_iterator.to_values()
+ print(output)
+
+ ..
code-block:: python
+
+ [
+ ['', 'result', 'table', '_start', '_stop', '_time', '_value', '_field', '_measurement', 'location']
+ ['', '', '0', '2022-06-16', '2022-06-16', '2022-06-16', '24.3', 'temperature', 'my_measurement', 'New York']
+ ['', '', '1', '2022-06-16', '2022-06-16', '2022-06-16', '25.3', 'temperature', 'my_measurement', 'Prague']
+ ...
+ ]
+
+ """ # noqa: E501
+ response = self._invokable_scripts_service \
+ .post_scripts_id_invoke(script_id=script_id,
+ script_invocation_params=ScriptInvocationParams(params=params),
+ async_req=False,
+ _preload_content=False)
+
+ return self._to_csv(response)
+
+ def invoke_script_raw(self, script_id: str, params: dict = None) -> Iterator[List[str]]:
+ """
+ Synchronously invoke a script and return the raw unprocessed result as a str.
+
+ The bind parameters referenced in the script are substituted with the `params` key-values sent in the request body.
+
+ :param str script_id: The ID of the script to invoke. (required)
+ :param params: bind parameters
+ :return: Result as a str.
+ """
+ response = self._invokable_scripts_service \
+ .post_scripts_id_invoke(script_id=script_id,
+ script_invocation_params=ScriptInvocationParams(params=params),
+ async_req=False,
+ _preload_content=True)
+
+ return response
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/labels_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/labels_api.py
new file mode 100644
index 0000000..006cb90
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/labels_api.py
@@ -0,0 +1,96 @@
+"""Labels are a way to add visual metadata to dashboards, tasks, and other items in the InfluxDB UI."""
+
+from typing import List, Dict, Union
+
+from influxdb_client import LabelsService, LabelCreateRequest, Label, LabelUpdate
+
+
+class LabelsApi(object):
+ """Implementation for '/api/v2/labels' endpoint."""
+
+ def __init__(self, influxdb_client):
+ """Initialize defaults."""
+ self._influxdb_client = influxdb_client
+ self._service = LabelsService(influxdb_client.api_client)
+
+ def create_label(self, name: str, org_id: str, properties: Dict[str, str] = None) -> Label:
+ """
+ Create a new label.
+
+ :param name: label name
+ :param org_id: organization id
+ :param properties: optional label properties
+ :return: created label
+ """
+ label_request = LabelCreateRequest(org_id=org_id, name=name, properties=properties)
+ return self._service.post_labels(label_create_request=label_request).label
+
+ def update_label(self, label: Label):
+ """
+ Update an existing label name and properties.
+
+ :param label: label
+ :return: the updated label
+ """
+ label_update = LabelUpdate()
+ label_update.properties = label.properties
+ label_update.name = label.name
+ return self._service.patch_labels_id(label_id=label.id, label_update=label_update).label
+
+ def delete_label(self, label: Union[str, Label]):
+ """
+ Delete the label.
+
+ :param label: label id or Label
+ """
+ label_id = None
+
+ if isinstance(label, str):
+ label_id = label
+
+ if isinstance(label, Label):
+ label_id = label.id
+
+ return self._service.delete_labels_id(label_id=label_id)
+
+ def clone_label(self, cloned_name: str, label: Label) -> Label:
+ """
+ Create a new label as a copy of an existing label.
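+
+ A minimal usage sketch (the label ID below is hypothetical) of cloning an existing label:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ labels_api = client.labels_api()
+ existing = labels_api.find_label_by_id("09cbd4c09b3a4000")
+ copy = labels_api.clone_label("my-label-copy", existing)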
+
+ :param cloned_name: new label name
+ :param label: existing label
+ :return: cloned Label
+ """
+ cloned_properties = None
+ if label.properties is not None:
+ cloned_properties = label.properties.copy()
+
+ return self.create_label(name=cloned_name, properties=cloned_properties, org_id=label.org_id)
+
+ def find_labels(self, **kwargs) -> List['Label']:
+ """
+ Get all available labels.
+
+ :key str org_id: The organization ID.
+
+ :return: labels
+ """
+ return self._service.get_labels(**kwargs).labels
+
+ def find_label_by_id(self, label_id: str):
+ """
+ Retrieve the label by id.
+
+ :param label_id: the label ID
+ :return: Label
+ """
+ return self._service.get_labels_id(label_id=label_id).label
+
+ def find_label_by_org(self, org_id) -> List['Label']:
+ """
+ Get the list of all labels for a given organization.
+
+ :param org_id: organization id
+ :return: list of labels
+ """
+ return self._service.get_labels(org_id=org_id).labels
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/logging_handler.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/logging_handler.py
new file mode 100644
index 0000000..445a828
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/logging_handler.py
@@ -0,0 +1,64 @@
+"""Use the influxdb_client with python native logging."""
+import logging
+
+from influxdb_client import InfluxDBClient
+
+
+class InfluxLoggingHandler(logging.Handler):
+ """
+ InfluxLoggingHandler instances dispatch logging events to InfluxDB.
+
+ There is no need to set a Formatter.
+ The raw input will be passed on to the InfluxDB write API.
+ """
+
+ DEFAULT_LOG_RECORD_KEYS = list(logging.makeLogRecord({}).__dict__.keys()) + ['message']
+
+ def __init__(self, *, url, token, org, bucket, client_args=None, write_api_args=None):
+ """
+ Initialize defaults.
+
+ The arguments `client_args` and `write_api_args` can be dicts of kwargs.
+ They are passed on to the InfluxDBClient and write_api calls respectively.
+ """
+ super().__init__()
+
+ self.bucket = bucket
+
+ client_args = {} if client_args is None else client_args
+ self.client = InfluxDBClient(url=url, token=token, org=org, **client_args)
+
+ write_api_args = {} if write_api_args is None else write_api_args
+ self.write_api = self.client.write_api(**write_api_args)
+
+ def __del__(self):
+ """Make sure all resources are closed."""
+ self.close()
+
+ def close(self) -> None:
+ """Close the write_api, client and logger."""
+ self.write_api.close()
+ self.client.close()
+ super().close()
+
+ def emit(self, record: logging.LogRecord) -> None:
+ """Emit a record via the influxDB WriteApi."""
+ try:
+ message = self.format(record)
+ extra = self._get_extra_values(record)
+ return self.write_api.write(record=message, **extra)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except (Exception,):
+ self.handleError(record)
+
+ def _get_extra_values(self, record: logging.LogRecord) -> dict:
+ """
+ Extract all items from the record that were injected via extra.
+
+ Example: `logging.debug(msg, extra={key: value, ...})`.
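+
+ Because ``emit`` passes these values to ``write_api.write(record=message, **extra)``, the
+ target bucket can be overridden per record (a minimal sketch, assuming a handler attached
+ to ``logger`` and an existing bucket ``other-bucket``):
+
+ .. code-block:: python
+
+ logger.debug("my-measurement value=1", extra={'bucket': 'other-bucket'})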
+ """ + extra = {'bucket': self.bucket} + extra.update({key: value for key, value in record.__dict__.items() + if key not in self.DEFAULT_LOG_RECORD_KEYS}) + return extra diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/organizations_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/organizations_api.py new file mode 100644 index 0000000..e25c698 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/organizations_api.py @@ -0,0 +1,60 @@ +""" +An organization is a workspace for a group of users. + +All dashboards, tasks, buckets, members, etc., belong to an organization. +""" + +from influxdb_client import OrganizationsService, UsersService, Organization, PatchOrganizationRequest + + +class OrganizationsApi(object): + """Implementation for '/api/v2/orgs' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + self._influxdb_client = influxdb_client + self._organizations_service = OrganizationsService(influxdb_client.api_client) + self._users_service = UsersService(influxdb_client.api_client) + + def me(self): + """Return the current authenticated user.""" + user = self._users_service.get_me() + return user + + def find_organization(self, org_id): + """Retrieve an organization.""" + return self._organizations_service.get_orgs_id(org_id=org_id) + + def find_organizations(self, **kwargs): + """ + List all organizations. + + :key int offset: Offset for pagination + :key int limit: Limit for pagination + :key bool descending: + :key str org: Filter organizations to a specific organization name. + :key str org_id: Filter organizations to a specific organization ID. + :key str user_id: Filter organizations to a specific user ID. + """ + return self._organizations_service.get_orgs(**kwargs).orgs + + def create_organization(self, name: str = None, organization: Organization = None) -> Organization: + """Create an organization.""" + if organization is None: + organization = Organization(name=name) + return self._organizations_service.post_orgs(post_organization_request=organization) + + def update_organization(self, organization: Organization) -> Organization: + """Update an organization. + + :param organization: Organization update to apply (required) + :return: Organization + """ + request = PatchOrganizationRequest(name=organization.name, + description=organization.description) + + return self._organizations_service.patch_orgs_id(org_id=organization.id, patch_organization_request=request) + + def delete_organization(self, org_id: str): + """Delete an organization.""" + return self._organizations_service.delete_orgs_id(org_id=org_id) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api.py new file mode 100644 index 0000000..8611021 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api.py @@ -0,0 +1,310 @@ +""" +Querying InfluxDB by FluxLang. + +Flux is InfluxData’s functional data scripting language designed for querying, analyzing, and acting on data. +""" + +from typing import List, Generator, Any, Callable + +from influxdb_client import Dialect +from influxdb_client.client._base import _BaseQueryApi +from influxdb_client.client.flux_table import FluxRecord, TableList, CSVIterator + + +class QueryOptions(object): + """Query options.""" + + def __init__(self, profilers: List[str] = None, profiler_callback: Callable = None) -> None: + """ + Initialize query options. 
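+
+ A minimal sketch (assuming a running InfluxDB at ``http://localhost:8086``) of enabling
+ profilers together with a callback:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+ from influxdb_client.client.query_api import QueryOptions
+
+ def profiler_callback(record):
+ print(f"profiler: {record}")
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ query_api = client.query_api(QueryOptions(profilers=["query", "operator"],
+ profiler_callback=profiler_callback))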
+
+ :param profilers: list of enabled flux profilers
+ :param profiler_callback: callback function that receives profiler records (FluxRecord)
+ """
+ self.profilers = profilers
+ self.profiler_callback = profiler_callback
+
+
+class QueryApi(_BaseQueryApi):
+ """Implementation for '/api/v2/query' endpoint."""
+
+ def __init__(self, influxdb_client, query_options=QueryOptions()):
+ """
+ Initialize query client.
+
+ :param influxdb_client: influxdb client
+ """
+ super().__init__(influxdb_client=influxdb_client, query_options=query_options)
+
+ def query_csv(self, query: str, org=None, dialect: Dialect = _BaseQueryApi.default_dialect, params: dict = None) \
+ -> CSVIterator:
+ """
+ Execute the Flux query and return results as a CSV iterator. Each iteration returns a row of the CSV file.
+
+ :param query: a Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param dialect: csv dialect format
+ :param params: bind parameters
+ :return: :class:`~Iterator[List[str]]` wrapped into :class:`~influxdb_client.client.flux_table.CSVIterator`
+ :rtype: CSVIterator
+
+ Serialize the query results to a flattened list of values via :func:`~influxdb_client.client.flux_table.CSVIterator.to_values`:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+
+ # Query: using CSV iterator
+ csv_iterator = client.query_api().query_csv('from(bucket:"my-bucket") |> range(start: -10m)')
+
+ # Serialize to values
+ output = csv_iterator.to_values()
+ print(output)
+
+ .. code-block:: python
+
+ [
+ ['#datatype', 'string', 'long', 'dateTime:RFC3339', 'dateTime:RFC3339', 'dateTime:RFC3339', 'double', 'string', 'string', 'string']
+ ['#group', 'false', 'false', 'true', 'true', 'false', 'false', 'true', 'true', 'true']
+ ['#default', '_result', '', '', '', '', '', '', '', '']
+ ['', 'result', 'table', '_start', '_stop', '_time', '_value', '_field', '_measurement', 'location']
+ ['', '', '0', '2022-06-16', '2022-06-16', '2022-06-16', '24.3', 'temperature', 'my_measurement', 'New York']
+ ['', '', '1', '2022-06-16', '2022-06-16', '2022-06-16', '25.3', 'temperature', 'my_measurement', 'Prague']
+ ...
+ ]
+
+ If you would like to turn off `annotated CSV headers <https://docs.influxdata.com/influxdb/latest/reference/syntax/annotated-csv/>`_ you can use the following code:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient, Dialect
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+
+ # Query: using CSV iterator
+ csv_iterator = client.query_api().query_csv('from(bucket:"my-bucket") |> range(start: -10m)',
+ dialect=Dialect(header=False, annotations=[]))
+
+ for csv_line in csv_iterator:
+ print(csv_line)
+
+ .. code-block:: python
+
+ [
+ ['', '_result', '0', '2022-06-16', '2022-06-16', '2022-06-16', '24.3', 'temperature', 'my_measurement', 'New York']
+ ['', '_result', '1', '2022-06-16', '2022-06-16', '2022-06-16', '25.3', 'temperature', 'my_measurement', 'Prague']
+ ...
+ ]
+ """ # noqa: E501
+ org = self._org_param(org)
+ response = self._query_api.post_query(org=org, query=self._create_query(query, dialect, params),
+ async_req=False, _preload_content=False)
+
+ return self._to_csv(response)
+
+ def query_raw(self, query: str, org=None, dialect=_BaseQueryApi.default_dialect, params: dict = None):
+ """
+ Execute a synchronous Flux query and return the raw unprocessed result as a str.
+
+ :param query: a Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param dialect: csv dialect format
+ :param params: bind parameters
+ :return: str
+ """
+ org = self._org_param(org)
+ result = self._query_api.post_query(org=org, query=self._create_query(query, dialect, params), async_req=False,
+ _preload_content=False)
+
+ return result
+
+ def query(self, query: str, org=None, params: dict = None) -> TableList:
+ """Execute a synchronous Flux query and return the result as a :class:`~influxdb_client.client.flux_table.FluxTable` list.
+
+ :param query: the Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param params: bind parameters
+ :return: :class:`~influxdb_client.client.flux_table.FluxTable` list wrapped into
+ :class:`~influxdb_client.client.flux_table.TableList`
+ :rtype: TableList
+
+ Serialize the query results to a flattened list of values via :func:`~influxdb_client.client.flux_table.TableList.to_values`:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+
+ # Query: using Table structure
+ tables = client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)')
+
+ # Serialize to values
+ output = tables.to_values(columns=['location', '_time', '_value'])
+ print(output)
+
+ .. code-block:: python
+
+ [
+ ['New York', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 24.3],
+ ['Prague', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 25.3],
+ ...
+ ]
+
+ Serialize the query results to JSON via :func:`~influxdb_client.client.flux_table.TableList.to_json`:
+
+ .. code-block:: python
+
+ from influxdb_client import InfluxDBClient
+
+ with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+
+ # Query: using Table structure
+ tables = client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)')
+
+ # Serialize to JSON
+ output = tables.to_json(indent=5)
+ print(output)
+
+ .. code-block:: javascript
+
+ [
+ {
+ "_measurement": "mem",
+ "_start": "2021-06-23T06:50:11.897825+00:00",
+ "_stop": "2021-06-25T06:50:11.897825+00:00",
+ "_time": "2020-02-27T16:20:00.897825+00:00",
+ "region": "north",
+ "_field": "usage",
+ "_value": 15
+ },
+ {
+ "_measurement": "mem",
+ "_start": "2021-06-23T06:50:11.897825+00:00",
+ "_stop": "2021-06-25T06:50:11.897825+00:00",
+ "_time": "2020-02-27T16:20:01.897825+00:00",
+ "region": "west",
+ "_field": "usage",
+ "_value": 10
+ },
+ ...
+ ]
+ """ # noqa: E501
+ org = self._org_param(org)
+
+ response = self._query_api.post_query(org=org, query=self._create_query(query, self.default_dialect, params),
+ async_req=False, _preload_content=False, _return_http_data_only=False)
+
+ return self._to_tables(response, query_options=self._get_query_options())
+
+ def query_stream(self, query: str, org=None, params: dict = None) -> Generator['FluxRecord', Any, None]:
+ """
+ Execute a synchronous Flux query and return a stream of FluxRecord as a Generator['FluxRecord'].
+
+ :param query: the Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param params: bind parameters
+ :return: Generator['FluxRecord']
+ """
+ org = self._org_param(org)
+
+ response = self._query_api.post_query(org=org, query=self._create_query(query, self.default_dialect, params),
+ async_req=False, _preload_content=False, _return_http_data_only=False)
+ return self._to_flux_record_stream(response, query_options=self._get_query_options())
+
+ def query_data_frame(self, query: str, org=None, data_frame_index: List[str] = None, params: dict = None,
+ use_extension_dtypes: bool = False):
+ """
+ Execute a synchronous Flux query and return a Pandas DataFrame.
+
+ .. note:: If the ``query`` returns tables with differing schemas then the client generates a :class:`~DataFrame` for each of them.
+
+ :param query: the Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param data_frame_index: the list of columns that are used as DataFrame index
+ :param params: bind parameters
+ :param use_extension_dtypes: set to ``True`` to use pandas' extension data types.
+ Useful for queries with the ``pivot`` function.
+ When data has missing values, column data type may change (to ``object`` or ``float64``).
+ Nullable extension types (``Int64``, ``Float64``, ``boolean``) support the ``pandas.NA`` value.
+ For more info, see https://pandas.pydata.org/docs/user_guide/missing_data.html.
+ :return: :class:`~DataFrame` or :class:`~List[DataFrame]`
+
+ .. warning:: For optimal processing of the query results use the ``pivot()`` function, which aligns results as a table.
+
+ .. code-block:: text
+
+ from(bucket:"my-bucket")
+ |> range(start: -5m, stop: now())
+ |> filter(fn: (r) => r._measurement == "mem")
+ |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+
+ For more info see:
+ - https://docs.influxdata.com/resources/videos/pivots-in-flux/
+ - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/
+ - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/
+ """ # noqa: E501
+ _generator = self.query_data_frame_stream(query, org=org, data_frame_index=data_frame_index, params=params,
+ use_extension_dtypes=use_extension_dtypes)
+ return self._to_data_frames(_generator)
+
+ def query_data_frame_stream(self, query: str, org=None, data_frame_index: List[str] = None, params: dict = None,
+ use_extension_dtypes: bool = False):
+ """
+ Execute a synchronous Flux query and return a stream of Pandas DataFrames as a :class:`~Generator[DataFrame]`.
+
+ .. note:: If the ``query`` returns tables with differing schemas then the client generates a :class:`~DataFrame` for each of them.
+
+ :param query: the Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClient.org`` is used.
+ :param data_frame_index: the list of columns that are used as DataFrame index
+ :param params: bind parameters
+ :param use_extension_dtypes: set to ``True`` to use pandas' extension data types.
+ Useful for queries with the ``pivot`` function.
+ When data has missing values, column data type may change (to ``object`` or ``float64``).
+ Nullable extension types (``Int64``, ``Float64``, ``boolean``) support the ``pandas.NA`` value.
+ For more info, see https://pandas.pydata.org/docs/user_guide/missing_data.html.
+ :return: :class:`~Generator[DataFrame]`
+
+ .. warning:: For optimal processing of the query results use the ``pivot()`` function, which aligns results as a table.
+
+ .. code-block:: text
+
+ from(bucket:"my-bucket")
+ |> range(start: -5m, stop: now())
+ |> filter(fn: (r) => r._measurement == "mem")
+ |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
+
+ For more info see:
+ - https://docs.influxdata.com/resources/videos/pivots-in-flux/
+ - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/
+ - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/
+ """ # noqa: E501
+ org = self._org_param(org)
+
+ response = self._query_api.post_query(org=org, query=self._create_query(query, self.default_dialect, params,
+ dataframe_query=True),
+ async_req=False, _preload_content=False, _return_http_data_only=False)
+
+ return self._to_data_frame_stream(data_frame_index=data_frame_index,
+ response=response,
+ query_options=self._get_query_options(),
+ use_extension_dtypes=use_extension_dtypes)
+
+ def __del__(self):
+ """Close QueryAPI."""
+ pass
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api_async.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api_async.py
new file mode 100644
index 0000000..b3b42cb
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/query_api_async.py
@@ -0,0 +1,236 @@
+"""
+Querying InfluxDB by FluxLang.
+
+Flux is InfluxData’s functional data scripting language designed for querying, analyzing, and acting on data.
+"""
+from typing import List, AsyncGenerator
+
+from influxdb_client.client._base import _BaseQueryApi
+from influxdb_client.client.flux_table import FluxRecord, TableList
+from influxdb_client.client.query_api import QueryOptions
+from influxdb_client.rest import _UTF_8_encoding, ApiException
+from .._async.rest import RESTResponseAsync
+
+
+class QueryApiAsync(_BaseQueryApi):
+ """Asynchronous implementation for '/api/v2/query' endpoint."""
+
+ def __init__(self, influxdb_client, query_options=QueryOptions()):
+ """
+ Initialize query client.
+
+ :param influxdb_client: influxdb client
+ """
+ super().__init__(influxdb_client=influxdb_client, query_options=query_options)
+
+ async def query(self, query: str, org=None, params: dict = None) -> TableList:
+ """
+ Execute an asynchronous Flux query and return the result as a :class:`~influxdb_client.client.flux_table.FluxTable` list.
+
+ :param query: the Flux query
+ :param str, Organization org: specifies the organization for executing the query;
+ takes the ``ID``, ``Name`` or ``Organization``.
+ If not specified, the default value from ``InfluxDBClientAsync.org`` is used.
+ :param params: bind parameters + :return: :class:`~influxdb_client.client.flux_table.FluxTable` list wrapped into + :class:`~influxdb_client.client.flux_table.TableList` + :rtype: TableList + + Serialization the query results to flattened list of values via :func:`~influxdb_client.client.flux_table.TableList.to_values`: + + .. code-block:: python + + from influxdb_client import InfluxDBClient + + async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client: + + # Query: using Table structure + tables = await client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)') + + # Serialize to values + output = tables.to_values(columns=['location', '_time', '_value']) + print(output) + + .. code-block:: python + + [ + ['New York', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 24.3], + ['Prague', datetime.datetime(2022, 6, 7, 11, 3, 22, 917593, tzinfo=tzutc()), 25.3], + ... + ] + + Serialization the query results to JSON via :func:`~influxdb_client.client.flux_table.TableList.to_json`: + + .. code-block:: python + + from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync + + async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client: + # Query: using Table structure + tables = await client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)') + + # Serialize to JSON + output = tables.to_json(indent=5) + print(output) + + .. code-block:: javascript + + [ + { + "_measurement": "mem", + "_start": "2021-06-23T06:50:11.897825+00:00", + "_stop": "2021-06-25T06:50:11.897825+00:00", + "_time": "2020-02-27T16:20:00.897825+00:00", + "region": "north", + "_field": "usage", + "_value": 15 + }, + { + "_measurement": "mem", + "_start": "2021-06-23T06:50:11.897825+00:00", + "_stop": "2021-06-25T06:50:11.897825+00:00", + "_time": "2020-02-27T16:20:01.897825+00:00", + "region": "west", + "_field": "usage", + "_value": 10 + }, + ... + ] + """ # noqa: E501 + org = self._org_param(org) + + response = await self._post_query(org=org, query=self._create_query(query, self.default_dialect, params)) + + return await self._to_tables_async(response, query_options=self._get_query_options()) + + async def query_stream(self, query: str, org=None, params: dict = None) -> AsyncGenerator['FluxRecord', None]: + """ + Execute asynchronous Flux query and return stream of :class:`~influxdb_client.client.flux_table.FluxRecord` as an AsyncGenerator[:class:`~influxdb_client.client.flux_table.FluxRecord`]. + + :param query: the Flux query + :param str, Organization org: specifies the organization for executing the query; + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. + :param params: bind parameters + :return: AsyncGenerator[:class:`~influxdb_client.client.flux_table.FluxRecord`] + """ # noqa: E501 + org = self._org_param(org) + + response = await self._post_query(org=org, query=self._create_query(query, self.default_dialect, params)) + + return await self._to_flux_record_stream_async(response, query_options=self._get_query_options()) + + async def query_data_frame(self, query: str, org=None, data_frame_index: List[str] = None, params: dict = None, + use_extension_dtypes: bool = False): + """ + Execute asynchronous Flux query and return :class:`~pandas.core.frame.DataFrame`. + + .. note:: If the ``query`` returns tables with differing schemas than the client generates a :class:`~DataFrame` for each of them. 
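+
+        A minimal usage sketch (connection details are illustrative placeholders):
+
+        .. code-block:: python
+
+            from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+
+            async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+                # Pivot the result so each field becomes a DataFrame column
+                df = await client.query_api().query_data_frame(
+                    'from(bucket:"my-bucket") |> range(start: -10m) '
+                    '|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")')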
+ + :param query: the Flux query + :param str, Organization org: specifies the organization for executing the query; + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. + :param data_frame_index: the list of columns that are used as DataFrame index + :param params: bind parameters + :param use_extension_dtypes: set to ``True`` to use panda's extension data types. + Useful for queries with ``pivot`` function. + When data has missing values, column data type may change (to ``object`` or ``float64``). + Nullable extension types (``Int64``, ``Float64``, ``boolean``) support ``panda.NA`` value. + For more info, see https://pandas.pydata.org/docs/user_guide/missing_data.html. + :return: :class:`~DataFrame` or :class:`~List[DataFrame]` + + .. warning:: For the optimal processing of the query results use the ``pivot() function`` which align results as a table. + + .. code-block:: text + + from(bucket:"my-bucket") + |> range(start: -5m, stop: now()) + |> filter(fn: (r) => r._measurement == "mem") + |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") + + For more info see: + - https://docs.influxdata.com/resources/videos/pivots-in-flux/ + - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/ + - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/ + """ # noqa: E501 + _generator = await self.query_data_frame_stream(query, org=org, data_frame_index=data_frame_index, + params=params, use_extension_dtypes=use_extension_dtypes) + + dataframes = [] + async for dataframe in _generator: + dataframes.append(dataframe) + + return self._to_data_frames(dataframes) + + async def query_data_frame_stream(self, query: str, org=None, data_frame_index: List[str] = None, + params: dict = None, use_extension_dtypes: bool = False): + """ + Execute asynchronous Flux query and return stream of :class:`~pandas.core.frame.DataFrame` as an AsyncGenerator[:class:`~pandas.core.frame.DataFrame`]. + + .. note:: If the ``query`` returns tables with differing schemas than the client generates a :class:`~DataFrame` for each of them. + + :param query: the Flux query + :param str, Organization org: specifies the organization for executing the query; + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. + :param data_frame_index: the list of columns that are used as DataFrame index + :param params: bind parameters + :param use_extension_dtypes: set to ``True`` to use panda's extension data types. + Useful for queries with ``pivot`` function. + When data has missing values, column data type may change (to ``object`` or ``float64``). + Nullable extension types (``Int64``, ``Float64``, ``boolean``) support ``panda.NA`` value. + For more info, see https://pandas.pydata.org/docs/user_guide/missing_data.html. + :return: :class:`AsyncGenerator[:class:`DataFrame`]` + + .. warning:: For the optimal processing of the query results use the ``pivot() function`` which align results as a table. + + .. 
code-block:: text + + from(bucket:"my-bucket") + |> range(start: -5m, stop: now()) + |> filter(fn: (r) => r._measurement == "mem") + |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") + + For more info see: + - https://docs.influxdata.com/resources/videos/pivots-in-flux/ + - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/ + - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/ + """ # noqa: E501 + org = self._org_param(org) + + response = await self._post_query(org=org, query=self._create_query(query, self.default_dialect, params, + dataframe_query=True)) + + return await self._to_data_frame_stream_async(data_frame_index=data_frame_index, response=response, + query_options=self._get_query_options(), + use_extension_dtypes=use_extension_dtypes) + + async def query_raw(self, query: str, org=None, dialect=_BaseQueryApi.default_dialect, params: dict = None): + """ + Execute asynchronous Flux query and return result as raw unprocessed result as a str. + + :param query: a Flux query + :param str, Organization org: specifies the organization for executing the query; + Take the ``ID``, ``Name`` or ``Organization``. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. + :param dialect: csv dialect format + :param params: bind parameters + :return: :class:`~str` + """ + org = self._org_param(org) + result = await self._post_query(org=org, query=self._create_query(query, dialect, params)) + raw_bytes = await result.read() + return raw_bytes.decode(_UTF_8_encoding) + + async def _post_query(self, org, query): + response = await self._query_api.post_query_async(org=org, + query=query, + async_req=False, + _preload_content=False, + _return_http_data_only=True) + if not 200 <= response.status <= 299: + data = await response.read() + raise ApiException(http_resp=RESTResponseAsync(response, data)) + + return response diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/tasks_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/tasks_api.py new file mode 100644 index 0000000..5ca18fb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/tasks_api.py @@ -0,0 +1,226 @@ +""" +Process and analyze your data with tasks in the InfluxDB task engine. + +Use tasks (scheduled Flux queries) to input a data stream and then analyze, modify, and act on the data accordingly. +""" + +import datetime +from typing import List + +from influxdb_client import TasksService, Task, TaskCreateRequest, TaskUpdateRequest, LabelResponse, LabelMapping, \ + AddResourceMemberRequestBody, RunManually, Run, LogEvent +from influxdb_client.client._pages import _Paginated + + +class TasksApi(object): + """Implementation for '/api/v2/tasks' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + self._influxdb_client = influxdb_client + self._service = TasksService(influxdb_client.api_client) + + def find_task_by_id(self, task_id) -> Task: + """Retrieve a task.""" + task = self._service.get_tasks_id(task_id) + return task + + def find_tasks(self, **kwargs): + """List all tasks up to set limit (max 500). 
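+
+        A listing sketch (assumes an already-configured client; the values are illustrative):
+
+        .. code-block:: python
+
+            from influxdb_client import InfluxDBClient
+
+            with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+                # Tasks are returned as a plain list, up to the given limit
+                for task in client.tasks_api().find_tasks(limit=100):
+                    print(task.name, task.status)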
+ + :key str name: only returns tasks with the specified name + :key str after: returns tasks after specified ID + :key str user: filter tasks to a specific user ID + :key str org: filter tasks to a specific organization name + :key str org_id: filter tasks to a specific organization ID + :key int limit: the number of tasks to return + :return: Tasks + """ + return self._service.get_tasks(**kwargs).tasks + + def find_tasks_iter(self, **kwargs): + """Iterate over all tasks with pagination. + + :key str name: only returns tasks with the specified name + :key str after: returns tasks after specified ID + :key str user: filter tasks to a specific user ID + :key str org: filter tasks to a specific organization name + :key str org_id: filter tasks to a specific organization ID + :key int limit: the number of tasks in one page + :return: Tasks iterator + """ + return _Paginated(self._service.get_tasks, lambda response: response.tasks).find_iter(**kwargs) + + def create_task(self, task: Task = None, task_create_request: TaskCreateRequest = None) -> Task: + """Create a new task.""" + if task_create_request is not None: + return self._service.post_tasks(task_create_request) + + if task is not None: + request = TaskCreateRequest(flux=task.flux, org_id=task.org_id, org=task.org, description=task.description, + status=task.status) + + return self.create_task(task_create_request=request) + + raise ValueError("task or task_create_request must be not None") + + @staticmethod + def _create_task(name: str, flux: str, every, cron, org_id: str) -> Task: + + task = Task(id=0, name=name, org_id=org_id, status="active", flux=flux) + + repetition = "" + if every is not None: + repetition += "every: " + repetition += every + + if cron is not None: + repetition += "cron: " + repetition += '"' + cron + '"' + + flux_with_options = '{} \n\noption task = {{name: "{}", {}}}'.format(flux, name, repetition) + task.flux = flux_with_options + + return task + + def create_task_every(self, name, flux, every, organization) -> Task: + """Create a new task with every repetition schedule.""" + task = self._create_task(name, flux, every, None, organization.id) + return self.create_task(task) + + def create_task_cron(self, name: str, flux: str, cron: str, org_id: str) -> Task: + """Create a new task with cron repetition schedule.""" + task = self._create_task(name=name, flux=flux, cron=cron, org_id=org_id, every=None) + return self.create_task(task) + + def delete_task(self, task_id: str): + """Delete a task.""" + if task_id is not None: + return self._service.delete_tasks_id(task_id=task_id) + + def update_task(self, task: Task) -> Task: + """Update a task.""" + req = TaskUpdateRequest(flux=task.flux, description=task.description, every=task.every, cron=task.cron, + status=task.status, offset=task.offset) + + return self.update_task_request(task_id=task.id, task_update_request=req) + + def update_task_request(self, task_id, task_update_request: TaskUpdateRequest) -> Task: + """Update a task.""" + return self._service.patch_tasks_id(task_id=task_id, task_update_request=task_update_request) + + def clone_task(self, task: Task) -> Task: + """Clone a task.""" + cloned = Task(id=0, name=task.name, org_id=task.org_id, org=task.org, flux=task.flux, status="active") + + created = self.create_task(cloned) + if task.id: + labels = self.get_labels(task.id) + for label in labels.labels: + self.add_label(label.id, created.id) + return created + + def get_labels(self, task_id): + """List all labels for a task.""" + return 
self._service.get_tasks_id_labels(task_id=task_id) + + def add_label(self, label_id: str, task_id: str) -> LabelResponse: + """Add a label to a task.""" + label_mapping = LabelMapping(label_id=label_id) + return self._service.post_tasks_id_labels(task_id=task_id, label_mapping=label_mapping) + + def delete_label(self, label_id: str, task_id: str): + """Delete a label from a task.""" + return self._service.delete_tasks_id_labels_id(task_id=task_id, label_id=label_id) + + def get_members(self, task_id: str): + """List all task members.""" + return self._service.get_tasks_id_members(task_id=task_id).users + + def add_member(self, member_id, task_id): + """Add a member to a task.""" + user = AddResourceMemberRequestBody(id=member_id) + return self._service.post_tasks_id_members(task_id=task_id, add_resource_member_request_body=user) + + def delete_member(self, member_id, task_id): + """Remove a member from a task.""" + return self._service.delete_tasks_id_members_id(user_id=member_id, task_id=task_id) + + def get_owners(self, task_id): + """List all owners of a task.""" + return self._service.get_tasks_id_owners(task_id=task_id).users + + def add_owner(self, owner_id, task_id): + """Add an owner to a task.""" + user = AddResourceMemberRequestBody(id=owner_id) + return self._service.post_tasks_id_owners(task_id=task_id, add_resource_member_request_body=user) + + def delete_owner(self, owner_id, task_id): + """Remove an owner from a task.""" + return self._service.delete_tasks_id_owners_id(user_id=owner_id, task_id=task_id) + + def get_runs(self, task_id, **kwargs) -> List['Run']: + """ + Retrieve list of run records for a task. + + :param task_id: task id + :key str after: returns runs after specified ID + :key int limit: the number of runs to return + :key datetime after_time: filter runs to those scheduled after this time, RFC3339 + :key datetime before_time: filter runs to those scheduled before this time, RFC3339 + """ + return self._service.get_tasks_id_runs(task_id=task_id, **kwargs).runs + + def get_run(self, task_id: str, run_id: str) -> Run: + """ + Get run record for specific task and run id. + + :param task_id: task id + :param run_id: run id + :return: Run for specified task and run id + """ + return self._service.get_tasks_id_runs_id(task_id=task_id, run_id=run_id) + + def get_run_logs(self, task_id: str, run_id: str) -> List['LogEvent']: + """Retrieve all logs for a run.""" + return self._service.get_tasks_id_runs_id_logs(task_id=task_id, run_id=run_id).events + + def run_manually(self, task_id: str, scheduled_for: datetime = None): + """ + Manually start a run of the task now overriding the current schedule. + + :param task_id: + :param scheduled_for: planned execution + """ + r = RunManually(scheduled_for=scheduled_for) + return self._service.post_tasks_id_runs(task_id=task_id, run_manually=r) + + def retry_run(self, task_id: str, run_id: str): + """ + Retry a task run. + + :param task_id: task id + :param run_id: run id + """ + return self._service.post_tasks_id_runs_id_retry(task_id=task_id, run_id=run_id) + + def cancel_run(self, task_id: str, run_id: str): + """ + Cancel a currently running run. + + :param task_id: + :param run_id: + """ + return self._service.delete_tasks_id_runs_id(task_id=task_id, run_id=run_id) + + def get_logs(self, task_id: str) -> List['LogEvent']: + """ + Retrieve all logs for a task. 
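+
+        A retrieval sketch (the ``task_id`` value and the ``tasks_api`` handle are
+        illustrative placeholders):
+
+        .. code-block:: python
+
+            # Each entry is a LogEvent describing one scheduler/run message
+            for event in tasks_api.get_logs(task_id="0123456789abcdef"):
+                print(event.time, event.message)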
+ + :param task_id: task id + """ + return self._service.get_tasks_id_logs(task_id=task_id).events + + def find_tasks_by_user(self, task_user_id): + """List all tasks by user.""" + return self.find_tasks(user=task_user_id) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/users_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/users_api.py new file mode 100644 index 0000000..dbaca7a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/users_api.py @@ -0,0 +1,80 @@ +""" +Users are those with access to InfluxDB. + +To grant a user permission to access data, add them as a member of an organization +and provide them with an authentication token. +""" + +from typing import Union +from influxdb_client import UsersService, User, Users, UserResponse, PasswordResetBody + + +class UsersApi(object): + """Implementation for '/api/v2/users' endpoint.""" + + def __init__(self, influxdb_client): + """Initialize defaults.""" + self._influxdb_client = influxdb_client + self._service = UsersService(influxdb_client.api_client) + + def me(self) -> User: + """Return the current authenticated user.""" + user = self._service.get_me() + return user + + def create_user(self, name: str) -> User: + """Create a user.""" + user = User(name=name) + + return self._service.post_users(user=user) + + def update_user(self, user: User) -> UserResponse: + """Update a user. + + :param user: User update to apply (required) + :return: User + """ + return self._service.patch_users_id(user_id=user.id, user=user) + + def update_password(self, user: Union[str, User, UserResponse], password: str) -> None: + """Update a password. + + :param user: User to update password (required) + :param password: New password (required) + :return: None + """ + user_id = self._user_id(user) + + return self._service.post_users_id_password(user_id=user_id, password_reset_body=PasswordResetBody(password)) + + def delete_user(self, user: Union[str, User, UserResponse]) -> None: + """Delete a user. + + :param user: user id or User + :return: None + """ + user_id = self._user_id(user) + + return self._service.delete_users_id(user_id=user_id) + + def find_users(self, **kwargs) -> Users: + """List all users. + + :key int offset: The offset for pagination. The number of records to skip. + :key int limit: Limits the number of records returned. Default is `20`. + :key str after: The last resource ID from which to seek from (but not including). + This is to be used instead of `offset`. + :key str name: The user name. + :key str id: The user ID. 
+        :return: Users
+        """
+        return self._service.get_users(**kwargs)
+
+    def _user_id(self, user: Union[str, User, UserResponse]):
+        if isinstance(user, User):
+            user_id = user.id
+        elif isinstance(user, UserResponse):
+            user_id = user.id
+        else:
+            user_id = user
+        return user_id
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__init__.py
new file mode 100644
index 0000000..f9a8320
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__init__.py
@@ -0,0 +1 @@
+"""Utils package."""
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..21d0976
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils.cpython-312.pyc
new file mode 100644
index 0000000..c9100c7
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils_pandas.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils_pandas.cpython-312.pyc
new file mode 100644
index 0000000..4033ac5
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/date_utils_pandas.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/helpers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/helpers.cpython-312.pyc
new file mode 100644
index 0000000..639014f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/helpers.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/multiprocessing_helper.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/multiprocessing_helper.cpython-312.pyc
new file mode 100644
index 0000000..9e89d97
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/__pycache__/multiprocessing_helper.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils.py
new file mode 100644
index 0000000..7b6750c
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils.py
@@ -0,0 +1,101 @@
+"""Utils to get the right date-parsing function."""
+import datetime
+from sys import version_info
+import threading
+from datetime import timezone as tz
+
+from dateutil import parser
+
+date_helper = None
+
+lock_ = threading.Lock()
+
+
+class DateHelper:
+    """
+    DateHelper groups different implementations of date operations.
+
+    If you would like to serialize the query results to a custom timezone, you can use the following code:
+
+    .. code-block:: python
+
+        from influxdb_client.client.util import date_utils
+        from influxdb_client.client.util.date_utils import DateHelper
+        import dateutil.parser
+        from dateutil import tz
+
+        def parse_date(date_string: str):
+            return dateutil.parser.parse(date_string).astimezone(tz.gettz('ETC/GMT+2'))
+
+        date_utils.date_helper = DateHelper()
+        date_utils.date_helper.parse_date = parse_date
+    """
+
+    def __init__(self, timezone: datetime.tzinfo = tz.utc) -> None:
+        """
+        Initialize defaults.
+
+        :param timezone: Default timezone used for serialization of "datetime" without "tzinfo".
+                         Default value is "UTC".
+        """
+        self.timezone = timezone
+
+    def parse_date(self, date_string: str):
+        """
+        Parse string into Date or Timestamp.
+
+        :return: Returns a :class:`datetime.datetime` object or a compliant implementation
+                 like :class:`class 'pandas._libs.tslibs.timestamps.Timestamp`
+        """
+        pass
+
+    def to_nanoseconds(self, delta):
+        """
+        Get number of nanoseconds in timedelta.
+
+        Solution comes from v1 client. Thx.
+        https://github.com/influxdata/influxdb-python/pull/811
+        """
+        nanoseconds_in_days = delta.days * 86400 * 10 ** 9
+        nanoseconds_in_seconds = delta.seconds * 10 ** 9
+        nanoseconds_in_micros = delta.microseconds * 10 ** 3
+
+        return nanoseconds_in_days + nanoseconds_in_seconds + nanoseconds_in_micros
+
+    def to_utc(self, value: datetime):
+        """
+        Convert datetime to UTC timezone.
+
+        :param value: datetime
+        :return: datetime in UTC
+        """
+        if not value.tzinfo:
+            return self.to_utc(value.replace(tzinfo=self.timezone))
+        else:
+            return value.astimezone(tz.utc)
+
+
+def get_date_helper() -> DateHelper:
+    """
+    Return DateHelper with the proper implementation.
+
+    If 'ciso8601' is available then use 'ciso8601.parse_datetime', else
+    use 'datetime.fromisoformat' (Python >= 3.11) or 'dateutil.parser.parse' (Python < 3.11).
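+
+    A resolution sketch (which parser gets picked depends on the environment,
+    so the returned type may vary):
+
+    .. code-block:: python
+
+        from influxdb_client.client.util.date_utils import get_date_helper
+
+        # The first call resolves and caches the parser; later calls reuse it
+        helper = get_date_helper()
+        timestamp = helper.parse_date("2009-11-10T23:00:00.123456Z")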
+ """ + global date_helper + if date_helper is None: + with lock_: + # avoid duplicate initialization + if date_helper is None: + _date_helper = DateHelper() + try: + import ciso8601 + _date_helper.parse_date = ciso8601.parse_datetime + except ModuleNotFoundError: + if (version_info.major, version_info.minor) >= (3, 11): + _date_helper.parse_date = datetime.datetime.fromisoformat + else: + _date_helper.parse_date = parser.parse + date_helper = _date_helper + + return date_helper diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils_pandas.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils_pandas.py new file mode 100644 index 0000000..9a87c6a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/date_utils_pandas.py @@ -0,0 +1,15 @@ +"""Pandas date utils.""" +from influxdb_client.client.util.date_utils import DateHelper +from influxdb_client.extras import pd + + +class PandasDateTimeHelper(DateHelper): + """DateHelper that use Pandas library with nanosecond precision.""" + + def parse_date(self, date_string: str): + """Parse date string into `class 'pandas._libs.tslibs.timestamps.Timestamp`.""" + return pd.to_datetime(date_string) + + def to_nanoseconds(self, delta): + """Get number of nanoseconds with nanos precision.""" + return super().to_nanoseconds(delta) + (delta.nanoseconds if hasattr(delta, 'nanoseconds') else 0) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/helpers.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/helpers.py new file mode 100644 index 0000000..e1fdce9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/helpers.py @@ -0,0 +1,50 @@ +"""Functions to share utility across client classes.""" +from influxdb_client.rest import ApiException + + +def _is_id(value): + """ + Check if the value is valid InfluxDB ID. + + :param value: to check + :return: True if provided parameter is valid InfluxDB ID. + """ + if value and len(value) == 16: + try: + int(value, 16) + return True + except ValueError: + return False + return False + + +def get_org_query_param(org, client, required_id=False): + """ + Get required type of Org query parameter. + + :param str, Organization org: value provided as a parameter into API (optional) + :param InfluxDBClient client: with default value for Org parameter + :param bool required_id: true if the query param has to be a ID + :return: request type of org query parameter or None + """ + _org = client.org if org is None else org + if 'Organization' in type(_org).__name__: + _org = _org.id + if required_id and _org and not _is_id(_org): + try: + organizations = client.organizations_api().find_organizations(org=_org) + if len(organizations) < 1: + from influxdb_client.client.exceptions import InfluxDBError + message = f"The client cannot find organization with name: '{_org}' " \ + "to determine their ID. Are you using token with sufficient permission?" + raise InfluxDBError(response=None, message=message) + return organizations[0].id + except ApiException as e: + if e.status == 404: + from influxdb_client.client.exceptions import InfluxDBError + message = f"The client cannot find organization with name: '{_org}' " \ + "to determine their ID." 
+ raise InfluxDBError(response=None, message=message) + raise e + + return _org diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/util/multiprocessing_helper.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/multiprocessing_helper.py new file mode 100644 index 0000000..41530a2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/util/multiprocessing_helper.py @@ -0,0 +1,205 @@ +""" +Helpers classes to make easier use the client in multiprocessing environment. + +For more information how the multiprocessing works see Python's +`reference docs `_. +""" +import logging +import multiprocessing + +from influxdb_client import InfluxDBClient, WriteOptions +from influxdb_client.client.exceptions import InfluxDBError + +logger = logging.getLogger('influxdb_client.client.util.multiprocessing_helper') + + +def _success_callback(conf: (str, str, str), data: str): + """Successfully writen batch.""" + logger.debug(f"Written batch: {conf}, data: {data}") + + +def _error_callback(conf: (str, str, str), data: str, exception: InfluxDBError): + """Unsuccessfully writen batch.""" + logger.debug(f"Cannot write batch: {conf}, data: {data} due: {exception}") + + +def _retry_callback(conf: (str, str, str), data: str, exception: InfluxDBError): + """Retryable error.""" + logger.debug(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}") + + +class _PoisonPill: + """To notify process to terminate.""" + + pass + + +class MultiprocessingWriter(multiprocessing.Process): + """ + The Helper class to write data into InfluxDB in independent OS process. + + Example: + .. code-block:: python + + from influxdb_client import WriteOptions + from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter + + + def main(): + writer = MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org", + write_options=WriteOptions(batch_size=100)) + writer.start() + + for x in range(1, 1000): + writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}") + + writer.__del__() + + + if __name__ == '__main__': + main() + + + How to use with context_manager: + .. code-block:: python + + from influxdb_client import WriteOptions + from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter + + + def main(): + with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org", + write_options=WriteOptions(batch_size=100)) as writer: + for x in range(1, 1000): + writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}") + + + if __name__ == '__main__': + main() + + + How to handle batch events: + .. 
code-block:: python + + from influxdb_client import WriteOptions + from influxdb_client.client.exceptions import InfluxDBError + from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter + + + class BatchingCallback(object): + + def success(self, conf: (str, str, str), data: str): + print(f"Written batch: {conf}, data: {data}") + + def error(self, conf: (str, str, str), data: str, exception: InfluxDBError): + print(f"Cannot write batch: {conf}, data: {data} due: {exception}") + + def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError): + print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}") + + + def main(): + callback = BatchingCallback() + with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org", + success_callback=callback.success, + error_callback=callback.error, + retry_callback=callback.retry) as writer: + + for x in range(1, 1000): + writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}") + + + if __name__ == '__main__': + main() + + + """ + + __started__ = False + __disposed__ = False + + def __init__(self, **kwargs) -> None: + """ + Initialize defaults. + + For more information how to initialize the writer see the examples above. + + :param kwargs: arguments are passed into ``__init__`` function of ``InfluxDBClient`` and ``write_api``. + """ + multiprocessing.Process.__init__(self) + self.kwargs = kwargs + self.client = None + self.write_api = None + self.queue_ = multiprocessing.Manager().Queue() + + def write(self, **kwargs) -> None: + """ + Append time-series data into underlying queue. + + For more information how to pass arguments see the examples above. + + :param kwargs: arguments are passed into ``write`` function of ``WriteApi`` + :return: None + """ + assert self.__disposed__ is False, 'Cannot write data: the writer is closed.' + assert self.__started__ is True, 'Cannot write data: the writer is not started.' + self.queue_.put(kwargs) + + def run(self): + """Initialize ``InfluxDBClient`` and waits for data to writes into InfluxDB.""" + # Initialize Client and Write API + self.client = InfluxDBClient(**self.kwargs) + self.write_api = self.client.write_api(write_options=self.kwargs.get('write_options', WriteOptions()), + success_callback=self.kwargs.get('success_callback', _success_callback), + error_callback=self.kwargs.get('error_callback', _error_callback), + retry_callback=self.kwargs.get('retry_callback', _retry_callback)) + # Infinite loop - until poison pill + while True: + next_record = self.queue_.get() + if type(next_record) is _PoisonPill: + # Poison pill means break the loop + self.terminate() + self.queue_.task_done() + break + self.write_api.write(**next_record) + self.queue_.task_done() + + def start(self) -> None: + """Start independent process for writing data into InfluxDB.""" + super().start() + self.__started__ = True + + def terminate(self) -> None: + """ + Cleanup resources in independent process. + + This function **cannot be used** to terminate the ``MultiprocessingWriter``. + If you want to finish your writes please call: ``__del__``. 
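+
+        A shutdown sketch (the context-manager examples above do this implicitly;
+        the connection values are illustrative placeholders):
+
+        .. code-block:: python
+
+            writer = MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org")
+            writer.start()
+            writer.write(bucket="my-bucket", record="mem,tag=a value=1i 1")
+            writer.__del__()  # flushes the queue, then joins and disposes the process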
+ """ + if self.write_api: + logger.info("flushing data...") + self.write_api.__del__() + self.write_api = None + if self.client: + self.client.__del__() + self.client = None + logger.info("closed") + + def __enter__(self): + """Enter the runtime context related to this object.""" + self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + """Exit the runtime context related to this object.""" + self.__del__() + + def __del__(self): + """Dispose the client and write_api.""" + if self.__started__: + self.queue_.put(_PoisonPill()) + self.queue_.join() + self.join() + self.queue_ = None + self.__started__ = False + self.__disposed__ = True diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/warnings.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/warnings.py new file mode 100644 index 0000000..b682ff8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/warnings.py @@ -0,0 +1,52 @@ +"""The warnings message definition.""" +import warnings + + +class MissingPivotFunction(UserWarning): + """User warning about missing pivot() function.""" + + @staticmethod + def print_warning(query: str): + """Print warning about missing pivot() function and how to deal with that.""" + if 'fieldsAsCols' in query or 'pivot' in query: + return + + message = f"""The query doesn't contains the pivot() function. + +The result will not be shaped to optimal processing by pandas.DataFrame. Use the pivot() function by: + + {query} |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") + +You can disable this warning by: + import warnings + from influxdb_client.client.warnings import MissingPivotFunction + + warnings.simplefilter("ignore", MissingPivotFunction) + +For more info see: + - https://docs.influxdata.com/resources/videos/pivots-in-flux/ + - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/ + - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/ +""" + warnings.warn(message, MissingPivotFunction) + + +class CloudOnlyWarning(UserWarning): + """User warning about availability only on the InfluxDB Cloud.""" + + @staticmethod + def print_warning(api_name: str, doc_url: str): + """Print warning about availability only on the InfluxDB Cloud.""" + message = f"""The '{api_name}' is available only on the InfluxDB Cloud. + +For more info see: + - {doc_url} + - https://docs.influxdata.com/influxdb/cloud/ + +You can disable this warning by: + import warnings + from influxdb_client.client.warnings import CloudOnlyWarning + + warnings.simplefilter("ignore", CloudOnlyWarning) +""" + warnings.warn(message, CloudOnlyWarning) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__init__.py new file mode 100644 index 0000000..0d21a43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__init__.py @@ -0,0 +1,56 @@ +# flake8: noqa + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import apis into api package +from influxdb_client.service.authorizations_service import AuthorizationsService +from influxdb_client.service.backup_service import BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService +from influxdb_client.service.cells_service import CellsService +from influxdb_client.service.checks_service import ChecksService +from influxdb_client.service.config_service import ConfigService +from influxdb_client.service.dbr_ps_service import DBRPsService +from influxdb_client.service.dashboards_service import DashboardsService +from influxdb_client.service.delete_service import DeleteService +from influxdb_client.service.health_service import HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService +from influxdb_client.service.ping_service import PingService +from influxdb_client.service.query_service import QueryService +from influxdb_client.service.ready_service import ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService +from influxdb_client.service.resources_service import ResourcesService +from influxdb_client.service.restore_service import RestoreService +from influxdb_client.service.routes_service import RoutesService +from influxdb_client.service.rules_service import RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService +from influxdb_client.service.setup_service import SetupService +from influxdb_client.service.signin_service import SigninService +from influxdb_client.service.signout_service import SignoutService +from influxdb_client.service.sources_service import SourcesService +from influxdb_client.service.tasks_service import TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService +from influxdb_client.service.templates_service import TemplatesService +from influxdb_client.service.users_service import UsersService +from influxdb_client.service.variables_service import VariablesService +from influxdb_client.service.views_service import ViewsService +from influxdb_client.service.write_service import WriteService diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9d2b9a5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/dataframe_serializer.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/dataframe_serializer.cpython-312.pyc new file mode 100644 index 0000000..1176ec4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/dataframe_serializer.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/point.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/point.cpython-312.pyc new file mode 100644 index 0000000..717c07e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/point.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/retry.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/retry.cpython-312.pyc new file mode 100644 index 0000000..de9f211 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/__pycache__/retry.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/dataframe_serializer.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/dataframe_serializer.py new file mode 100644 index 0000000..ccc198a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/dataframe_serializer.py @@ -0,0 +1,290 @@ +""" +Functions for serialize Pandas DataFrame. + +Much of the code here is inspired by that in the aioinflux packet found here: https://github.com/gusutabopb/aioinflux +""" + +import logging +import math +import re + +from influxdb_client import WritePrecision +from influxdb_client.client.write.point import _ESCAPE_KEY, _ESCAPE_STRING, _ESCAPE_MEASUREMENT, DEFAULT_WRITE_PRECISION + +logger = logging.getLogger('influxdb_client.client.write.dataframe_serializer') + + +def _itertuples(data_frame): + cols = [data_frame.iloc[:, k] for k in range(len(data_frame.columns))] + return zip(data_frame.index, *cols) + + +class DataframeSerializer: + """Serialize DataFrame into LineProtocols.""" + + def __init__(self, data_frame, point_settings, precision=DEFAULT_WRITE_PRECISION, chunk_size: int = None, + **kwargs) -> None: + """ + Init serializer. + + :param data_frame: Pandas DataFrame to serialize + :param point_settings: Default Tags + :param precision: The precision for the unix timestamps within the body line-protocol. + :param chunk_size: The size of chunk for serializing into chunks. + :key data_frame_measurement_name: name of measurement for writing Pandas DataFrame + :key data_frame_tag_columns: list of DataFrame columns which are tags, rest columns will be fields + :key data_frame_timestamp_column: name of DataFrame column which contains a timestamp. The column can be defined as a :class:`~str` value + formatted as `2018-10-26`, `2018-10-26 12:00`, `2018-10-26 12:00:00-05:00` + or other formats and types supported by `pandas.to_datetime `_ - ``DataFrame`` + :key data_frame_timestamp_timezone: name of the timezone which is used for timestamp column - ``DataFrame`` + """ # noqa: E501 + # This function is hard to understand but for good reason: + # the approach used here is considerably more efficient + # than the alternatives. 
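+        #
+        # As a self-contained illustration of the technique (a hypothetical
+        # two-column data point, independent of the serializer internals):
+        #
+        #   f = eval('lambda p: f"m a={p[1]},b={p[2]}i {p[0].value}"')
+        #   f((pd.Timestamp('2020-01-01'), 1.5, 2))
+        #   # returns 'm a=1.5,b=2i 1577836800000000000'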
+ # + # We build up a Python expression that efficiently converts a data point + # tuple into line-protocol entry, and then evaluate the expression + # as a lambda so that we can call it. This avoids the overhead of + # invoking a function on every data value - we only have one function + # call per row instead. The expression consists of exactly + # one f-string, so we build up the parts of it as segments + # that are concatenated together to make the full f-string inside + # the lambda. + # + # Things are made a little more complex because fields and tags with NaN + # values and empty tags are omitted from the generated line-protocol + # output. + # + # As an example, say we have a data frame with two value columns: + # a float + # b int + # + # This will generate a lambda expression to be evaluated that looks like + # this: + # + # lambda p: f"""{measurement_name} {keys[0]}={p[1]},{keys[1]}={p[2]}i {p[0].value}""" + # + # This lambda is then executed for each row p. + # + # When NaNs are present, the expression looks like this (split + # across two lines to satisfy the code-style checker) + # + # lambda p: f"""{measurement_name} {"" if pd.isna(p[1]) + # else f"{keys[0]}={p[1]}"},{keys[1]}={p[2]}i {p[0].value}""" + # + # When there's a NaN value in column a, we'll end up with a comma at the start of the + # fields, so we run a regexp substitution after generating the line-protocol entries + # to remove this. + # + # We're careful to run these potentially costly extra steps only when NaN values actually + # exist in the data. + + from ...extras import pd, np + if not isinstance(data_frame, pd.DataFrame): + raise TypeError('Must be DataFrame, but type was: {0}.' + .format(type(data_frame))) + + data_frame_measurement_name = kwargs.get('data_frame_measurement_name') + if data_frame_measurement_name is None: + raise TypeError('"data_frame_measurement_name" is a Required Argument') + + timestamp_column = kwargs.get('data_frame_timestamp_column', None) + timestamp_timezone = kwargs.get('data_frame_timestamp_timezone', None) + data_frame = data_frame.copy(deep=False) + data_frame_timestamp = data_frame.index if timestamp_column is None else data_frame[timestamp_column] + if isinstance(data_frame_timestamp, pd.PeriodIndex): + data_frame_timestamp = data_frame_timestamp.to_timestamp() + else: + # TODO: this is almost certainly not what you want + # when the index is the default RangeIndex. + # Instead, it would probably be better to leave + # out the timestamp unless a time column is explicitly + # enabled. + data_frame_timestamp = pd.to_datetime(data_frame_timestamp, unit=precision) + + if timestamp_timezone: + if isinstance(data_frame_timestamp, pd.DatetimeIndex): + data_frame_timestamp = data_frame_timestamp.tz_localize(timestamp_timezone) + else: + data_frame_timestamp = data_frame_timestamp.dt.tz_localize(timestamp_timezone) + + if hasattr(data_frame_timestamp, 'tzinfo') and data_frame_timestamp.tzinfo is None: + data_frame_timestamp = data_frame_timestamp.tz_localize('UTC') + if timestamp_column is None: + data_frame.index = data_frame_timestamp + else: + data_frame[timestamp_column] = data_frame_timestamp + + data_frame_tag_columns = kwargs.get('data_frame_tag_columns') + data_frame_tag_columns = set(data_frame_tag_columns or []) + + # keys holds a list of string keys. + keys = [] + # tags holds a list of tag f-string segments ordered alphabetically by tag key. 
+ tags = [] + # fields holds a list of field f-string segments ordered alphebetically by field key + fields = [] + # field_indexes holds the index into each row of all the fields. + field_indexes = [] + + if point_settings.defaultTags: + for key, value in point_settings.defaultTags.items(): + # Avoid overwriting existing data if there's a column + # that already exists with the default tag's name. + # Note: when a new column is added, the old DataFrame + # that we've made a shallow copy of is unaffected. + # TODO: when there are NaN or empty values in + # the column, we could make a deep copy of the + # data and fill in those values with the default tag value. + if key not in data_frame.columns: + data_frame[key] = value + data_frame_tag_columns.add(key) + + # Get a list of all the columns sorted by field/tag key. + # We want to iterate through the columns in sorted order + # so that we know when we're on the first field so we + # can know whether a comma is needed for that + # field. + columns = sorted(enumerate(data_frame.dtypes.items()), key=lambda col: col[1][0]) + + # null_columns has a bool value for each column holding + # whether that column contains any null (NaN or None) values. + null_columns = data_frame.isnull().any() + timestamp_index = 0 + + # Iterate through the columns building up the expression for each column. + for index, (key, value) in columns: + key = str(key) + key_format = f'{{keys[{len(keys)}]}}' + keys.append(key.translate(_ESCAPE_KEY)) + # The field index is one more than the column index because the + # time index is at column zero in the finally zipped-together + # result columns. + field_index = index + 1 + val_format = f'p[{field_index}]' + + if key in data_frame_tag_columns: + # This column is a tag column. + if null_columns.iloc[index]: + key_value = f"""{{ + '' if {val_format} == '' or pd.isna({val_format}) else + f',{key_format}={{str({val_format}).translate(_ESCAPE_STRING)}}' + }}""" + else: + key_value = f',{key_format}={{str({val_format}).translate(_ESCAPE_KEY)}}' + tags.append(key_value) + continue + elif timestamp_column is not None and key in timestamp_column: + timestamp_index = field_index + continue + + # This column is a field column. + # Note: no comma separator is needed for the first field. + # It's important to omit it because when the first + # field column has no nulls, we don't run the comma-removal + # regexp substitution step. 
+ sep = '' if len(field_indexes) == 0 else ',' + if issubclass(value.type, np.integer) or issubclass(value.type, np.floating) or issubclass(value.type, np.bool_): # noqa: E501 + suffix = 'i' if issubclass(value.type, np.integer) else '' + if null_columns.iloc[index]: + field_value = f"""{{"" if pd.isna({val_format}) else f"{sep}{key_format}={{{val_format}}}{suffix}"}}""" # noqa: E501 + else: + field_value = f"{sep}{key_format}={{{val_format}}}{suffix}" + else: + if null_columns.iloc[index]: + field_value = f"""{{ + '' if pd.isna({val_format}) else + f'{sep}{key_format}="{{str({val_format}).translate(_ESCAPE_STRING)}}"' + }}""" + else: + field_value = f'''{sep}{key_format}="{{str({val_format}).translate(_ESCAPE_STRING)}}"''' + field_indexes.append(field_index) + fields.append(field_value) + + measurement_name = str(data_frame_measurement_name).translate(_ESCAPE_MEASUREMENT) + + tags = ''.join(tags) + fields = ''.join(fields) + timestamp = '{p[%s].value}' % timestamp_index + if precision == WritePrecision.US: + timestamp = '{int(p[%s].value / 1e3)}' % timestamp_index + elif precision == WritePrecision.MS: + timestamp = '{int(p[%s].value / 1e6)}' % timestamp_index + elif precision == WritePrecision.S: + timestamp = '{int(p[%s].value / 1e9)}' % timestamp_index + + f = eval(f'lambda p: f"""{{measurement_name}}{tags} {fields} {timestamp}"""', { + 'measurement_name': measurement_name, + '_ESCAPE_KEY': _ESCAPE_KEY, + '_ESCAPE_STRING': _ESCAPE_STRING, + 'keys': keys, + 'pd': pd, + }) + + for k, v in dict(data_frame.dtypes).items(): + if k in data_frame_tag_columns: + data_frame = data_frame.replace({k: ''}, np.nan) + + def _any_not_nan(p, indexes): + return any(map(lambda x: not pd.isna(p[x]), indexes)) + + self.data_frame = data_frame + self.f = f + self.field_indexes = field_indexes + self.first_field_maybe_null = null_columns.iloc[field_indexes[0] - 1] + self._any_not_nan = _any_not_nan + + # + # prepare chunks + # + if chunk_size is not None: + self.number_of_chunks = int(math.ceil(len(data_frame) / float(chunk_size))) + self.chunk_size = chunk_size + else: + self.number_of_chunks = None + + def serialize(self, chunk_idx: int = None): + """ + Serialize chunk into LineProtocols. + + :param chunk_idx: The index of chunk to serialize. If `None` then serialize whole dataframe. + """ + if chunk_idx is None: + chunk = self.data_frame + else: + logger.debug("Serialize chunk %s/%s ...", chunk_idx + 1, self.number_of_chunks) + chunk = self.data_frame[chunk_idx * self.chunk_size:(chunk_idx + 1) * self.chunk_size] + + if self.first_field_maybe_null: + # When the first field is null (None/NaN), we'll have + # a spurious leading comma which needs to be removed. + lp = (re.sub('^(( |[^ ])* ),([a-zA-Z0-9])(.*)', '\\1\\3\\4', self.f(p)) + for p in filter(lambda x: self._any_not_nan(x, self.field_indexes), _itertuples(chunk))) + return list(lp) + else: + return list(map(self.f, _itertuples(chunk))) + + def number_of_chunks(self): + """ + Return the number of chunks. + + :return: number of chunks or None if chunk_size is not specified. + """ + return self.number_of_chunks + + +def data_frame_to_list_of_points(data_frame, point_settings, precision=DEFAULT_WRITE_PRECISION, **kwargs): + """ + Serialize DataFrame into LineProtocols. + + :param data_frame: Pandas DataFrame to serialize + :param point_settings: Default Tags + :param precision: The precision for the unix timestamps within the body line-protocol. 
+ :key data_frame_measurement_name: name of measurement for writing Pandas DataFrame + :key data_frame_tag_columns: list of DataFrame columns which are tags, rest columns will be fields + :key data_frame_timestamp_column: name of DataFrame column which contains a timestamp. The column can be defined as a :class:`~str` value + formatted as `2018-10-26`, `2018-10-26 12:00`, `2018-10-26 12:00:00-05:00` + or other formats and types supported by `pandas.to_datetime `_ - ``DataFrame`` + :key data_frame_timestamp_timezone: name of the timezone which is used for timestamp column - ``DataFrame`` + """ # noqa: E501 + return DataframeSerializer(data_frame, point_settings, precision, **kwargs).serialize() diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/point.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/point.py new file mode 100644 index 0000000..cc95d20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/point.py @@ -0,0 +1,371 @@ +"""Point data structure to represent LineProtocol.""" + +import math +import warnings +from builtins import int +from datetime import datetime, timedelta, timezone +from decimal import Decimal +from numbers import Integral + +from influxdb_client.client.util.date_utils import get_date_helper +from influxdb_client.domain.write_precision import WritePrecision + +EPOCH = datetime.fromtimestamp(0, tz=timezone.utc) + +DEFAULT_WRITE_PRECISION = WritePrecision.NS + +_ESCAPE_MEASUREMENT = str.maketrans({ + ',': r'\,', + ' ': r'\ ', + '\n': r'\n', + '\t': r'\t', + '\r': r'\r', +}) + +_ESCAPE_KEY = str.maketrans({ + ',': r'\,', + '=': r'\=', + ' ': r'\ ', + '\n': r'\n', + '\t': r'\t', + '\r': r'\r', +}) + +_ESCAPE_STRING = str.maketrans({ + '"': r'\"', + '\\': r'\\', +}) + +try: + import numpy as np + + _HAS_NUMPY = True +except ModuleNotFoundError: + _HAS_NUMPY = False + + +class Point(object): + """ + Point defines the values that will be written to the database. + + Ref: https://docs.influxdata.com/influxdb/latest/reference/key-concepts/data-elements/#point + """ + + @staticmethod + def measurement(measurement): + """Create a new Point with specified measurement name.""" + p = Point(measurement) + return p + + @staticmethod + def from_dict(dictionary: dict, write_precision: WritePrecision = DEFAULT_WRITE_PRECISION, **kwargs): + """ + Initialize point from 'dict' structure. + + The expected dict structure is: + - measurement + - tags + - fields + - time + + Example: + .. code-block:: python + + # Use default dictionary structure + dict_structure = { + "measurement": "h2o_feet", + "tags": {"location": "coyote_creek"}, + "fields": {"water_level": 1.0}, + "time": 1 + } + point = Point.from_dict(dict_structure, WritePrecision.NS) + + Example: + .. code-block:: python + + # Use custom dictionary structure + dictionary = { + "name": "sensor_pt859", + "location": "warehouse_125", + "version": "2021.06.05.5874", + "pressure": 125, + "temperature": 10, + "created": 1632208639, + } + point = Point.from_dict(dictionary, + write_precision=WritePrecision.S, + record_measurement_key="name", + record_time_key="created", + record_tag_keys=["location", "version"], + record_field_keys=["pressure", "temperature"]) + + Int Types: + The following example shows how to configure the types of integers fields. + It is useful when you want to serialize integers always as ``float`` to avoid ``field type conflict`` + or use ``unsigned 64-bit integer`` as the type for serialization. + + .. 
code-block:: python + + # Use custom dictionary structure + dict_structure = { + "measurement": "h2o_feet", + "tags": {"location": "coyote_creek"}, + "fields": { + "water_level": 1.0, + "some_counter": 108913123234 + }, + "time": 1 + } + + point = Point.from_dict(dict_structure, field_types={"some_counter": "uint"}) + + :param dictionary: dictionary for serialize into data Point + :param write_precision: sets the precision for the supplied time values + :key record_measurement_key: key of dictionary with specified measurement + :key record_measurement_name: static measurement name for data Point + :key record_time_key: key of dictionary with specified timestamp + :key record_tag_keys: list of dictionary keys to use as a tag + :key record_field_keys: list of dictionary keys to use as a field + :key field_types: optional dictionary to specify types of serialized fields. Currently, is supported customization for integer types. + Possible integers types: + - ``int`` - serialize integers as "**Signed 64-bit integers**" - ``9223372036854775807i`` (default behaviour) + - ``uint`` - serialize integers as "**Unsigned 64-bit integers**" - ``9223372036854775807u`` + - ``float`` - serialize integers as "**IEEE-754 64-bit floating-point numbers**". Useful for unify number types in your pipeline to avoid field type conflict - ``9223372036854775807`` + The ``field_types`` can be also specified as part of incoming dictionary. For more info see an example above. + :return: new data point + """ # noqa: E501 + measurement_ = kwargs.get('record_measurement_name', None) + if measurement_ is None: + measurement_ = dictionary[kwargs.get('record_measurement_key', 'measurement')] + point = Point(measurement_) + + record_tag_keys = kwargs.get('record_tag_keys', None) + if record_tag_keys is not None: + for tag_key in record_tag_keys: + if tag_key in dictionary: + point.tag(tag_key, dictionary[tag_key]) + elif 'tags' in dictionary: + for tag_key, tag_value in dictionary['tags'].items(): + point.tag(tag_key, tag_value) + + record_field_keys = kwargs.get('record_field_keys', None) + if record_field_keys is not None: + for field_key in record_field_keys: + if field_key in dictionary: + point.field(field_key, dictionary[field_key]) + else: + for field_key, field_value in dictionary['fields'].items(): + point.field(field_key, field_value) + + record_time_key = kwargs.get('record_time_key', 'time') + if record_time_key in dictionary: + point.time(dictionary[record_time_key], write_precision=write_precision) + + _field_types = kwargs.get('field_types', {}) + if 'field_types' in dictionary: + _field_types = dictionary['field_types'] + # Map API fields types to Line Protocol types postfix: + # - int: 'i' + # - uint: 'u' + # - float: '' + point._field_types = dict(map( + lambda item: (item[0], 'i' if item[1] == 'int' else 'u' if item[1] == 'uint' else ''), + _field_types.items() + )) + + return point + + def __init__(self, measurement_name): + """Initialize defaults.""" + self._tags = {} + self._fields = {} + self._name = measurement_name + self._time = None + self._write_precision = DEFAULT_WRITE_PRECISION + self._field_types = {} + + def time(self, time, write_precision=DEFAULT_WRITE_PRECISION): + """ + Specify timestamp for DataPoint with declared precision. + + If time doesn't have specified timezone we assume that timezone is UTC. 
+ + Examples:: + Point.measurement("h2o").field("val", 1).time("2009-11-10T23:00:00.123456Z") + Point.measurement("h2o").field("val", 1).time(1257894000123456000) + Point.measurement("h2o").field("val", 1).time(datetime(2009, 11, 10, 23, 0, 0, 123456)) + Point.measurement("h2o").field("val", 1).time(1257894000123456000, write_precision=WritePrecision.NS) + + + :param time: the timestamp for your data + :param write_precision: sets the precision for the supplied time values + :return: this point + """ + self._write_precision = write_precision + self._time = time + return self + + def tag(self, key, value): + """Add tag with key and value.""" + self._tags[key] = value + return self + + def field(self, field, value): + """Add field with key and value.""" + self._fields[field] = value + return self + + def to_line_protocol(self, precision=None): + """ + Create LineProtocol. + + :param precision: required precision of LineProtocol. If it's not set then use the precision from ``Point``. + """ + _measurement = _escape_key(self._name, _ESCAPE_MEASUREMENT) + if _measurement.startswith("#"): + message = f"""The measurement name '{_measurement}' start with '#'. + +The output Line protocol will be interpret as a comment by InfluxDB. For more info see: + - https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/#comments +""" + warnings.warn(message, SyntaxWarning) + _tags = _append_tags(self._tags) + _fields = _append_fields(self._fields, self._field_types) + if not _fields: + return "" + _time = _append_time(self._time, self._write_precision if precision is None else precision) + + return f"{_measurement}{_tags}{_fields}{_time}" + + @property + def write_precision(self): + """Get precision.""" + return self._write_precision + + @classmethod + def set_str_rep(cls, rep_function): + """Set the string representation for all Points.""" + cls.__str___rep = rep_function + + def __str__(self): + """Create string representation of this Point.""" + return self.to_line_protocol() + + def __eq__(self, other): + """Return true iff other is equal to self.""" + if not isinstance(other, Point): + return False + # assume points are equal iff their instance fields are equal + return (self._tags == other._tags and + self._fields == other._fields and + self._name == other._name and + self._time == other._time and + self._write_precision == other._write_precision and + self._field_types == other._field_types) + + +def _append_tags(tags): + _return = [] + for tag_key, tag_value in sorted(tags.items()): + + if tag_value is None: + continue + + tag = _escape_key(tag_key) + value = _escape_tag_value(tag_value) + if tag != '' and value != '': + _return.append(f'{tag}={value}') + + return f"{',' if _return else ''}{','.join(_return)} " + + +def _append_fields(fields, field_types): + _return = [] + + for field, value in sorted(fields.items()): + if value is None: + continue + + if isinstance(value, float) or isinstance(value, Decimal) or _np_is_subtype(value, 'float'): + if not math.isfinite(value): + continue + s = str(value) + # It's common to represent whole numbers as floats + # and the trailing ".0" that Python produces is unnecessary + # in line-protocol, inconsistent with other line-protocol encoders, + # and takes more space than needed, so trim it off. 
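The encoding rules in this function (and in the branches that follow) are easiest to see on a concrete point; a small sketch, with tags and fields emitted in the alphabetical order that the sorted() calls produce:

from influxdb_client import Point

p = (Point.measurement("m")
     .tag("host", "server01")
     .field("ratio", 25.0)        # float: trailing ".0" trimmed -> ratio=25
     .field("count", 3)           # int: "i" suffix -> count=3i
     .field("ok", True)           # bool: lowercased -> ok=true
     .field("note", 'say "hi"'))  # str: quoted, inner quotes escaped

print(p.to_line_protocol())
# m,host=server01 count=3i,note="say \"hi\"",ok=true,ratio=25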
+ if s.endswith('.0'): + s = s[:-2] + _return.append(f'{_escape_key(field)}={s}') + elif (isinstance(value, int) or _np_is_subtype(value, 'int')) and not isinstance(value, bool): + _type = field_types.get(field, "i") + _return.append(f'{_escape_key(field)}={str(value)}{_type}') + elif isinstance(value, bool): + _return.append(f'{_escape_key(field)}={str(value).lower()}') + elif isinstance(value, str): + _return.append(f'{_escape_key(field)}="{_escape_string(value)}"') + else: + raise ValueError(f'Type: "{type(value)}" of field: "{field}" is not supported.') + + return f"{','.join(_return)}" + + +def _append_time(time, write_precision) -> str: + if time is None: + return '' + return f" {int(_convert_timestamp(time, write_precision))}" + + +def _escape_key(tag, escape_list=None) -> str: + if escape_list is None: + escape_list = _ESCAPE_KEY + return str(tag).translate(escape_list) + + +def _escape_tag_value(value) -> str: + ret = _escape_key(value) + if ret.endswith('\\'): + ret += ' ' + return ret + + +def _escape_string(value) -> str: + return str(value).translate(_ESCAPE_STRING) + + +def _convert_timestamp(timestamp, precision=DEFAULT_WRITE_PRECISION): + date_helper = get_date_helper() + if isinstance(timestamp, Integral): + return timestamp # assume precision is correct if timestamp is int + + if isinstance(timestamp, str): + timestamp = date_helper.parse_date(timestamp) + + if isinstance(timestamp, timedelta) or isinstance(timestamp, datetime): + + if isinstance(timestamp, datetime): + timestamp = date_helper.to_utc(timestamp) - EPOCH + + ns = date_helper.to_nanoseconds(timestamp) + + if precision is None or precision == WritePrecision.NS: + return ns + elif precision == WritePrecision.US: + return ns / 1e3 + elif precision == WritePrecision.MS: + return ns / 1e6 + elif precision == WritePrecision.S: + return ns / 1e9 + + raise ValueError(timestamp) + + +def _np_is_subtype(value, np_type): + if not _HAS_NUMPY or not hasattr(value, 'dtype'): + return False + + if np_type == 'float': + return np.issubdtype(value, np.floating) + elif np_type == 'int': + return np.issubdtype(value, np.integer) + return False diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write/retry.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/retry.py new file mode 100644 index 0000000..2c4b359 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write/retry.py @@ -0,0 +1,148 @@ +"""Implementation for Retry strategy during HTTP requests.""" + +import logging +from datetime import datetime, timedelta +from itertools import takewhile +from random import random +from typing import Callable + +from urllib3 import Retry +from urllib3.exceptions import MaxRetryError, ResponseError + +from influxdb_client.client.exceptions import InfluxDBError + +logger = logging.getLogger('influxdb_client.client.write.retry') + + +class WritesRetry(Retry): + """ + Writes retry configuration. + + The next delay is computed as random value between range + `retry_interval * exponential_base^(attempts-1)` and `retry_interval * exponential_base^(attempts) + + Example: + for retry_interval=5, exponential_base=2, max_retry_delay=125, total=5 + retry delays are random distributed values within the ranges of + [5-10, 10-20, 20-40, 40-80, 80-125] + """ + + def __init__(self, jitter_interval=0, max_retry_delay=125, exponential_base=2, max_retry_time=180, total=5, + retry_interval=5, retry_callback: Callable[[Exception], int] = None, **kw): + """ + Initialize defaults. 
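The delay-window arithmetic from the class docstring can be reproduced standalone (a sketch for illustration only; the function name here is not part of the library):

import random

def backoff_window(attempt, retry_interval=5, exponential_base=2, max_retry_delay=125):
    # Attempt n draws uniformly from [interval * base**(n-1), interval * base**n],
    # with the upper bound capped at max_retry_delay.
    start = retry_interval * exponential_base ** (attempt - 1)
    stop = min(retry_interval * exponential_base ** attempt, max_retry_delay)
    return start, stop

for attempt in range(1, 6):
    lo, hi = backoff_window(attempt)
    print(f"attempt {attempt}: {lo}-{hi}s, e.g. {random.uniform(lo, hi):.1f}s")
# attempt 1..5 -> 5-10, 10-20, 20-40, 40-80, 80-125 (the docstring's example)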
+ + :param int jitter_interval: random milliseconds when retrying writes + :param num max_retry_delay: maximum delay when retrying write in seconds + :param int max_retry_time: maximum total retry timeout in seconds, + attempt after this timeout throws MaxRetryError + :param int total: maximum number of retries + :param num retry_interval: initial first retry delay range in seconds + :param int exponential_base: base for the exponential retry delay, + :param Callable[[Exception], int] retry_callback: the callable ``callback`` to run after a retryable + error occurred. + The callable must accept one argument: + - `Exception`: a retryable error + """ + super().__init__(**kw) + self.jitter_interval = jitter_interval + self.total = total + self.retry_interval = retry_interval + self.max_retry_delay = max_retry_delay + self.max_retry_time = max_retry_time + self.exponential_base = exponential_base + self.retry_timeout = datetime.now() + timedelta(seconds=max_retry_time) + self.retry_callback = retry_callback + + def new(self, **kw): + """Initialize defaults.""" + if 'jitter_interval' not in kw: + kw['jitter_interval'] = self.jitter_interval + if 'retry_interval' not in kw: + kw['retry_interval'] = self.retry_interval + if 'max_retry_delay' not in kw: + kw['max_retry_delay'] = self.max_retry_delay + if 'max_retry_time' not in kw: + kw['max_retry_time'] = self.max_retry_time + if 'exponential_base' not in kw: + kw['exponential_base'] = self.exponential_base + if 'retry_callback' not in kw: + kw['retry_callback'] = self.retry_callback + + new = super().new(**kw) + new.retry_timeout = self.retry_timeout + return new + + def is_retry(self, method, status_code, has_retry_after=False): + """is_retry doesn't require the Retry-After header. If there is no Retry-After we will use backoff.""" + if not self._is_method_retryable(method): + return False + + return self.total and (status_code >= 429) + + def get_backoff_time(self): + """Variant of exponential backoff with initial and max delay and a random jitter delay.""" + # We want to consider only the last consecutive sequence of errors (ignore redirects). 
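The takewhile trick used in the next few lines can be seen in isolation; a toy sketch (real entries are urllib3 RequestHistory records whose redirect_location is None for failed attempts):

from itertools import takewhile

# Oldest attempt first; None marks a failure, a URL marks a redirect.
redirect_locations = [None, None, "https://example.com/new", None, None]
consecutive_errors = len(list(
    takewhile(lambda loc: loc is None, reversed(redirect_locations))))
print(consecutive_errors)  # 2 -> only failures after the most recent redirect count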
+ consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + # First fail doesn't increase backoff + consecutive_errors_len -= 1 + if consecutive_errors_len < 0: + return 0 + + range_start = self.retry_interval + range_stop = self.retry_interval * self.exponential_base + + i = 1 + while i <= consecutive_errors_len: + i += 1 + range_start = range_stop + range_stop = range_stop * self.exponential_base + if range_stop > self.max_retry_delay: + break + + if range_stop > self.max_retry_delay: + range_stop = self.max_retry_delay + + return range_start + (range_stop - range_start) * self._random() + + def get_retry_after(self, response): + """Get the value of Retry-After header and append random jitter delay.""" + retry_after = super().get_retry_after(response) + if retry_after: + retry_after += self._jitter_delay() + return retry_after + + def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None): + """Return a new Retry object with incremented retry counters.""" + if self.retry_timeout < datetime.now(): + raise MaxRetryError(_pool, url, error or ResponseError("max_retry_time exceeded")) + + new_retry = super().increment(method, url, response, error, _pool, _stacktrace) + + if response is not None: + parsed_error = InfluxDBError(response=response) + elif error is not None: + parsed_error = error + else: + parsed_error = f"Failed request to: {url}" + + message = f"The retriable error occurred during request. Reason: '{parsed_error}'." + if isinstance(parsed_error, InfluxDBError): + message += f" Retry in {parsed_error.retry_after}s." + + if self.retry_callback: + self.retry_callback(parsed_error) + + logger.warning(message) + + return new_retry + + def _jitter_delay(self): + return self.jitter_interval * random() + + def _random(self): + return random() diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api.py new file mode 100644 index 0000000..3b3db68 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api.py @@ -0,0 +1,587 @@ +"""Collect and write time series data to InfluxDB Cloud or InfluxDB OSS.""" + +# coding: utf-8 +import logging +import os +import warnings +from collections import defaultdict +from datetime import timedelta +from enum import Enum +from random import random +from time import sleep +from typing import Union, Any, Iterable, NamedTuple + +import reactivex as rx +from reactivex import operators as ops, Observable +from reactivex.scheduler import ThreadPoolScheduler +from reactivex.subject import Subject + +from influxdb_client import WritePrecision +from influxdb_client.client._base import _BaseWriteApi, _HAS_DATACLASS +from influxdb_client.client.util.helpers import get_org_query_param +from influxdb_client.client.write.dataframe_serializer import DataframeSerializer +from influxdb_client.client.write.point import Point, DEFAULT_WRITE_PRECISION +from influxdb_client.client.write.retry import WritesRetry +from influxdb_client.rest import _UTF_8_encoding + +logger = logging.getLogger('influxdb_client.client.write_api') + + +if _HAS_DATACLASS: + import dataclasses + from dataclasses import dataclass + + +class WriteType(Enum): + """Configuration which type of writes will client use.""" + + batching = 1 + asynchronous = 2 + synchronous = 3 + + +class WriteOptions(object): + """Write configuration.""" + + def __init__(self, write_type: WriteType 
= WriteType.batching, + batch_size=1_000, flush_interval=1_000, + jitter_interval=0, + retry_interval=5_000, + max_retries=5, + max_retry_delay=125_000, + max_retry_time=180_000, + exponential_base=2, + max_close_wait=300_000, + write_scheduler=ThreadPoolScheduler(max_workers=1)) -> None: + """ + Create write api configuration. + + :param write_type: methods of write (batching, asynchronous, synchronous) + :param batch_size: the number of data point to collect in batch + :param flush_interval: flush data at least in this interval (milliseconds) + :param jitter_interval: this is primarily to avoid large write spikes for users running a large number of + client instances ie, a jitter of 5s and flush duration 10s means flushes will happen every 10-15s + (milliseconds) + :param retry_interval: the time to wait before retry unsuccessful write (milliseconds) + :param max_retries: the number of max retries when write fails, 0 means retry is disabled + :param max_retry_delay: the maximum delay between each retry attempt in milliseconds + :param max_retry_time: total timeout for all retry attempts in milliseconds, if 0 retry is disabled + :param exponential_base: base for the exponential retry delay + :parama max_close_wait: the maximum time to wait for writes to be flushed if close() is called + :param write_scheduler: + """ + self.write_type = write_type + self.batch_size = batch_size + self.flush_interval = flush_interval + self.jitter_interval = jitter_interval + self.retry_interval = retry_interval + self.max_retries = max_retries + self.max_retry_delay = max_retry_delay + self.max_retry_time = max_retry_time + self.exponential_base = exponential_base + self.write_scheduler = write_scheduler + self.max_close_wait = max_close_wait + + def to_retry_strategy(self, **kwargs): + """ + Create a Retry strategy from write options. + + :key retry_callback: The callable ``callback`` to run after retryable error occurred. + The callable must accept one argument: + - `Exception`: an retryable error + """ + return WritesRetry( + total=self.max_retries, + retry_interval=self.retry_interval / 1_000, + jitter_interval=self.jitter_interval / 1_000, + max_retry_delay=self.max_retry_delay / 1_000, + max_retry_time=self.max_retry_time / 1_000, + exponential_base=self.exponential_base, + retry_callback=kwargs.get("retry_callback", None), + allowed_methods=["POST"]) + + def __getstate__(self): + """Return a dict of attributes that you want to pickle.""" + state = self.__dict__.copy() + # Remove write scheduler + del state['write_scheduler'] + return state + + def __setstate__(self, state): + """Set your object with the provided dict.""" + self.__dict__.update(state) + # Init default write Scheduler + self.write_scheduler = ThreadPoolScheduler(max_workers=1) + + +SYNCHRONOUS = WriteOptions(write_type=WriteType.synchronous) +ASYNCHRONOUS = WriteOptions(write_type=WriteType.asynchronous) + + +class PointSettings(object): + """Settings to store default tags.""" + + def __init__(self, **default_tags) -> None: + """ + Create point settings for write api. + + :param default_tags: Default tags which will be added to each point written by api. 
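A typical pairing of WriteOptions and PointSettings looks like this (a sketch; the URL, token, and the DATA_CENTER environment variable are placeholders):

from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import PointSettings, WriteOptions, WriteType

write_options = WriteOptions(write_type=WriteType.batching,
                             batch_size=500,
                             flush_interval=10_000,  # milliseconds
                             max_retries=3)
# "${env.DATA_CENTER}" is resolved from os.environ, as the _get_value helper that follows shows.
point_settings = PointSettings(customer="California Miner",
                               data_center="${env.DATA_CENTER}")

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    write_api = client.write_api(write_options=write_options,
                                 point_settings=point_settings)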
+ """ + self.defaultTags = dict() + + for key, val in default_tags.items(): + self.add_default_tag(key, val) + + @staticmethod + def _get_value(value): + + if value.startswith("${env."): + return os.environ.get(value[6:-1]) + + return value + + def add_default_tag(self, key, value) -> None: + """Add new default tag with key and value.""" + self.defaultTags[key] = self._get_value(value) + + +class _BatchItemKey(object): + def __init__(self, bucket, org, precision=DEFAULT_WRITE_PRECISION) -> None: + self.bucket = bucket + self.org = org + self.precision = precision + pass + + def __hash__(self) -> int: + return hash((self.bucket, self.org, self.precision)) + + def __eq__(self, o: object) -> bool: + return isinstance(o, self.__class__) \ + and self.bucket == o.bucket and self.org == o.org and self.precision == o.precision + + def __str__(self) -> str: + return '_BatchItemKey[bucket:\'{}\', org:\'{}\', precision:\'{}\']' \ + .format(str(self.bucket), str(self.org), str(self.precision)) + + +class _BatchItem(object): + def __init__(self, key: _BatchItemKey, data, size=1) -> None: + self.key = key + self.data = data + self.size = size + pass + + def to_key_tuple(self) -> (str, str, str): + return self.key.bucket, self.key.org, self.key.precision + + def __str__(self) -> str: + return '_BatchItem[key:\'{}\', size: \'{}\']' \ + .format(str(self.key), str(self.size)) + + +class _BatchResponse(object): + def __init__(self, data: _BatchItem, exception: Exception = None): + self.data = data + self.exception = exception + pass + + def __str__(self) -> str: + return '_BatchResponse[status:\'{}\', \'{}\']' \ + .format("failed" if self.exception else "success", str(self.data)) + + +def _body_reduce(batch_items): + return b'\n'.join(map(lambda batch_item: batch_item.data, batch_items)) + + +class WriteApi(_BaseWriteApi): + """ + Implementation for '/api/v2/write' endpoint. + + Example: + .. code-block:: python + + from influxdb_client import InfluxDBClient + from influxdb_client.client.write_api import SYNCHRONOUS + + + # Initialize SYNCHRONOUS instance of WriteApi + with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: + write_api = client.write_api(write_options=SYNCHRONOUS) + """ + + def __init__(self, + influxdb_client, + write_options: WriteOptions = WriteOptions(), + point_settings: PointSettings = PointSettings(), + **kwargs) -> None: + """ + Initialize defaults. + + :param influxdb_client: with default settings (organization) + :param write_options: write api configuration + :param point_settings: settings to store default tags. + :key success_callback: The callable ``callback`` to run after successfully writen a batch. + + The callable must accept two arguments: + - `Tuple`: ``(bucket, organization, precision)`` + - `str`: written data + + **[batching mode]** + :key error_callback: The callable ``callback`` to run after unsuccessfully writen a batch. + + The callable must accept three arguments: + - `Tuple`: ``(bucket, organization, precision)`` + - `str`: written data + - `Exception`: an occurred error + + **[batching mode]** + :key retry_callback: The callable ``callback`` to run after retryable error occurred. 
+ + The callable must accept three arguments: + - `Tuple`: ``(bucket, organization, precision)`` + - `str`: written data + - `Exception`: an retryable error + + **[batching mode]** + """ + super().__init__(influxdb_client=influxdb_client, point_settings=point_settings) + self._write_options = write_options + self._success_callback = kwargs.get('success_callback', None) + self._error_callback = kwargs.get('error_callback', None) + self._retry_callback = kwargs.get('retry_callback', None) + self._window_scheduler = None + + if self._write_options.write_type is WriteType.batching: + # Define Subject that listen incoming data and produces writes into InfluxDB + self._subject = Subject() + + self._window_scheduler = ThreadPoolScheduler(1) + self._disposable = self._subject.pipe( + # Split incoming data to windows by batch_size or flush_interval + ops.window_with_time_or_count(count=write_options.batch_size, + timespan=timedelta(milliseconds=write_options.flush_interval), + scheduler=self._window_scheduler), + # Map window into groups defined by 'organization', 'bucket' and 'precision' + ops.flat_map(lambda window: window.pipe( + # Group window by 'organization', 'bucket' and 'precision' + ops.group_by(lambda batch_item: batch_item.key), + # Create batch (concatenation line protocols by \n) + ops.map(lambda group: group.pipe( + ops.to_iterable(), + ops.map(lambda xs: _BatchItem(key=group.key, data=_body_reduce(xs), size=len(xs))))), + ops.merge_all())), + # Write data into InfluxDB (possibility to retry if its fail) + ops.filter(lambda batch: batch.size > 0), + ops.map(mapper=lambda batch: self._to_response(data=batch, delay=self._jitter_delay())), + ops.merge_all()) \ + .subscribe(self._on_next, self._on_error, self._on_complete) + + else: + self._subject = None + self._disposable = None + + if self._write_options.write_type is WriteType.asynchronous: + message = """The 'WriteType.asynchronous' is deprecated and will be removed in future major version. + +You can use native asynchronous version of the client: +- https://influxdb-client.readthedocs.io/en/stable/usage.html#how-to-use-asyncio + """ + warnings.warn(message, DeprecationWarning) + + def write(self, bucket: str, org: str = None, + record: Union[ + str, Iterable['str'], Point, Iterable['Point'], dict, Iterable['dict'], bytes, Iterable['bytes'], + Observable, NamedTuple, Iterable['NamedTuple'], 'dataclass', Iterable['dataclass'] + ] = None, + write_precision: WritePrecision = DEFAULT_WRITE_PRECISION, **kwargs) -> Any: + """ + Write time-series data into InfluxDB. + + :param str bucket: specifies the destination bucket for writes (required) + :param str, Organization org: specifies the destination organization for writes; + take the ID, Name or Organization. + If not specified the default value from ``InfluxDBClient.org`` is used. + :param WritePrecision write_precision: specifies the precision for the unix timestamps within + the body line-protocol. The precision specified on a Point has precedes + and is use for write. + :param record: Point, Line Protocol, Dictionary, NamedTuple, Data Classes, Pandas DataFrame or + RxPY Observable to write + :key data_frame_measurement_name: name of measurement for writing Pandas DataFrame - ``DataFrame`` + :key data_frame_tag_columns: list of DataFrame columns which are tags, + rest columns will be fields - ``DataFrame`` + :key data_frame_timestamp_column: name of DataFrame column which contains a timestamp. 
The column can be defined as a :class:`~str` value + formatted as `2018-10-26`, `2018-10-26 12:00`, `2018-10-26 12:00:00-05:00` + or other formats and types supported by `pandas.to_datetime `_ - ``DataFrame`` + :key data_frame_timestamp_timezone: name of the timezone which is used for timestamp column - ``DataFrame`` + :key record_measurement_key: key of record with specified measurement - + ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_measurement_name: static measurement name - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_time_key: key of record with specified timestamp - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_tag_keys: list of record keys to use as a tag - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_field_keys: list of record keys to use as a field - ``dictionary``, ``NamedTuple``, ``dataclass`` + + Example: + .. code-block:: python + + # Record as Line Protocol + write_api.write("my-bucket", "my-org", "h2o_feet,location=us-west level=125i 1") + + # Record as Dictionary + dictionary = { + "measurement": "h2o_feet", + "tags": {"location": "us-west"}, + "fields": {"level": 125}, + "time": 1 + } + write_api.write("my-bucket", "my-org", dictionary) + + # Record as Point + from influxdb_client import Point + point = Point("h2o_feet").tag("location", "us-west").field("level", 125).time(1) + write_api.write("my-bucket", "my-org", point) + + DataFrame: + If the ``data_frame_timestamp_column`` is not specified the index of `Pandas DataFrame `_ + is used as a ``timestamp`` for written data. The index can be `PeriodIndex `_ + or its must be transformable to ``datetime`` by + `pandas.to_datetime `_. + + If you would like to transform a column to ``PeriodIndex``, you can use something like: + + .. code-block:: python + + import pandas as pd + + # DataFrame + data_frame = ... + # Set column as Index + data_frame.set_index('column_name', inplace=True) + # Transform index to PeriodIndex + data_frame.index = pd.to_datetime(data_frame.index, unit='s') + + """ # noqa: E501 + org = get_org_query_param(org=org, client=self._influxdb_client) + + self._append_default_tags(record) + + if self._write_options.write_type is WriteType.batching: + return self._write_batching(bucket, org, record, + write_precision, **kwargs) + + payloads = defaultdict(list) + self._serialize(record, write_precision, payloads, **kwargs) + + _async_req = True if self._write_options.write_type == WriteType.asynchronous else False + + def write_payload(payload): + final_string = b'\n'.join(payload[1]) + return self._post_write(_async_req, bucket, org, final_string, payload[0]) + + results = list(map(write_payload, payloads.items())) + if not _async_req: + return None + elif len(results) == 1: + return results[0] + return results + + def flush(self): + """Flush data.""" + # TODO + pass + + def close(self): + """Flush data and dispose a batching buffer.""" + self.__del__() + + def __enter__(self): + """ + Enter the runtime context related to this object. + + It will bind this method’s return value to the target(s) + specified in the `as` clause of the statement. 
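Because WriteApi is a context manager, a batching writer with an error callback can be scoped so that pending batches are flushed on exit (a sketch; the callback signature follows the __init__ docstring above):

from influxdb_client import InfluxDBClient, Point
from influxdb_client.client.write_api import WriteOptions

def on_error(conf, data, exception):
    # conf is the (bucket, org, precision) tuple, data the serialized batch.
    print(f"write to {conf} failed: {exception}")

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    with client.write_api(write_options=WriteOptions(batch_size=100),
                          error_callback=on_error) as write_api:
        write_api.write("my-bucket", record=Point("h2o_feet").field("level", 1.0))
# Leaving both blocks disposes the Rx pipeline after flushing queued batches.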
+ + return: self instance + """ + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Exit the runtime context related to this object and close the WriteApi.""" + self.close() + + def __del__(self): + """Close WriteApi.""" + if self._subject: + self._subject.on_completed() + self._subject.dispose() + self._subject = None + + """ + We impose a maximum wait time to ensure that we do not cause a deadlock if the + background thread has exited abnormally + + Each iteration waits 100ms, but sleep expects the unit to be seconds so convert + the maximum wait time to seconds. + + We keep a counter of how long we've waited + """ + max_wait_time = self._write_options.max_close_wait / 1000 + waited = 0 + sleep_period = 0.1 + + # Wait for writing to finish + while not self._disposable.is_disposed: + sleep(sleep_period) + waited += sleep_period + + # Have we reached the upper limit? + if waited >= max_wait_time: + logger.warning( + "Reached max_close_wait (%s seconds) waiting for batches to finish writing. Force closing", + max_wait_time + ) + break + + if self._window_scheduler: + self._window_scheduler.executor.shutdown(wait=False) + self._window_scheduler = None + + if self._disposable: + self._disposable = None + pass + + def _write_batching(self, bucket, org, data, + precision=DEFAULT_WRITE_PRECISION, + **kwargs): + if isinstance(data, bytes): + _key = _BatchItemKey(bucket, org, precision) + self._subject.on_next(_BatchItem(key=_key, data=data)) + + elif isinstance(data, str): + self._write_batching(bucket, org, data.encode(_UTF_8_encoding), + precision, **kwargs) + + elif isinstance(data, Point): + self._write_batching(bucket, org, data.to_line_protocol(), data.write_precision, **kwargs) + + elif isinstance(data, dict): + self._write_batching(bucket, org, Point.from_dict(data, write_precision=precision, **kwargs), + precision, **kwargs) + + elif 'DataFrame' in type(data).__name__: + serializer = DataframeSerializer(data, self._point_settings, precision, self._write_options.batch_size, + **kwargs) + for chunk_idx in range(serializer.number_of_chunks): + self._write_batching(bucket, org, + serializer.serialize(chunk_idx), + precision, **kwargs) + elif hasattr(data, "_asdict"): + # noinspection PyProtectedMember + self._write_batching(bucket, org, data._asdict(), precision, **kwargs) + + elif _HAS_DATACLASS and dataclasses.is_dataclass(data): + self._write_batching(bucket, org, dataclasses.asdict(data), precision, **kwargs) + + elif isinstance(data, Iterable): + for item in data: + self._write_batching(bucket, org, item, precision, **kwargs) + + elif isinstance(data, Observable): + data.subscribe(lambda it: self._write_batching(bucket, org, it, precision, **kwargs)) + pass + + return None + + def _http(self, batch_item: _BatchItem): + + logger.debug("Write time series data into InfluxDB: %s", batch_item) + + if self._retry_callback: + def _retry_callback_delegate(exception): + return self._retry_callback(batch_item.to_key_tuple(), batch_item.data, exception) + else: + _retry_callback_delegate = None + + retry = self._write_options.to_retry_strategy(retry_callback=_retry_callback_delegate) + + self._post_write(False, batch_item.key.bucket, batch_item.key.org, batch_item.data, + batch_item.key.precision, urlopen_kw={'retries': retry}) + + logger.debug("Write request finished %s", batch_item) + + return _BatchResponse(data=batch_item) + + def _post_write(self, _async_req, bucket, org, body, precision, **kwargs): + + return self._write_service.post_write(org=org, bucket=bucket, body=body, 
precision=precision, + async_req=_async_req, + content_type="text/plain; charset=utf-8", + **kwargs) + + def _to_response(self, data: _BatchItem, delay: timedelta): + + return rx.of(data).pipe( + ops.subscribe_on(self._write_options.write_scheduler), + # use delay if its specified + ops.delay(duetime=delay, scheduler=self._write_options.write_scheduler), + # invoke http call + ops.map(lambda x: self._http(x)), + # catch exception to fail batch response + ops.catch(handler=lambda exception, source: rx.just(_BatchResponse(exception=exception, data=data))), + ) + + def _jitter_delay(self): + return timedelta(milliseconds=random() * self._write_options.jitter_interval) + + def _on_next(self, response: _BatchResponse): + if response.exception: + logger.error("The batch item wasn't processed successfully because: %s", response.exception) + if self._error_callback: + try: + self._error_callback(response.data.to_key_tuple(), response.data.data, response.exception) + except Exception as e: + """ + Unfortunately, because callbacks are user-provided generic code, exceptions can be entirely + arbitrary + + We trap it, log that it occurred and then proceed - there's not much more that we can + really do. + """ + logger.error("The configured error callback threw an exception: %s", e) + + else: + logger.debug("The batch item: %s was processed successfully.", response) + if self._success_callback: + try: + self._success_callback(response.data.to_key_tuple(), response.data.data) + except Exception as e: + logger.error("The configured success callback threw an exception: %s", e) + + @staticmethod + def _on_error(ex): + logger.error("unexpected error during batching: %s", ex) + + def _on_complete(self): + self._disposable.dispose() + logger.info("the batching processor was disposed") + + def __getstate__(self): + """Return a dict of attributes that you want to pickle.""" + state = self.__dict__.copy() + # Remove rx + del state['_subject'] + del state['_disposable'] + del state['_window_scheduler'] + del state['_write_service'] + return state + + def __setstate__(self, state): + """Set your object with the provided dict.""" + self.__dict__.update(state) + # Init Rx + self.__init__(self._influxdb_client, + self._write_options, + self._point_settings, + success_callback=self._success_callback, + error_callback=self._error_callback, + retry_callback=self._retry_callback) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api_async.py b/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api_async.py new file mode 100644 index 0000000..e9e2018 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/client/write_api_async.py @@ -0,0 +1,125 @@ +"""Collect and async write time series data to InfluxDB Cloud or InfluxDB OSS.""" +import logging +from collections import defaultdict +from typing import Union, Iterable, NamedTuple + +from influxdb_client import Point, WritePrecision +from influxdb_client.client._base import _BaseWriteApi, _HAS_DATACLASS +from influxdb_client.client.util.helpers import get_org_query_param +from influxdb_client.client.write.point import DEFAULT_WRITE_PRECISION +from influxdb_client.client.write_api import PointSettings + +logger = logging.getLogger('influxdb_client.client.write_api_async') + +if _HAS_DATACLASS: + from dataclasses import dataclass + + +class WriteApiAsync(_BaseWriteApi): + """ + Implementation for '/api/v2/write' endpoint. + + Example: + .. 
code-block:: python + + from influxdb_client_async import InfluxDBClientAsync + + + # Initialize async/await instance of Write API + async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client: + write_api = client.write_api() + """ + + def __init__(self, influxdb_client, point_settings: PointSettings = PointSettings()) -> None: + """ + Initialize defaults. + + :param influxdb_client: with default settings (organization) + :param point_settings: settings to store default tags. + """ + super().__init__(influxdb_client=influxdb_client, point_settings=point_settings) + + async def write(self, bucket: str, org: str = None, + record: Union[str, Iterable['str'], Point, Iterable['Point'], dict, Iterable['dict'], bytes, + Iterable['bytes'], NamedTuple, Iterable['NamedTuple'], 'dataclass', + Iterable['dataclass']] = None, + write_precision: WritePrecision = DEFAULT_WRITE_PRECISION, **kwargs) -> bool: + """ + Write time-series data into InfluxDB. + + :param str bucket: specifies the destination bucket for writes (required) + :param str, Organization org: specifies the destination organization for writes; + take the ID, Name or Organization. + If not specified the default value from ``InfluxDBClientAsync.org`` is used. + :param WritePrecision write_precision: specifies the precision for the unix timestamps within + the body line-protocol. The precision specified on a Point has precedes + and is use for write. + :param record: Point, Line Protocol, Dictionary, NamedTuple, Data Classes, Pandas DataFrame + :key data_frame_measurement_name: name of measurement for writing Pandas DataFrame - ``DataFrame`` + :key data_frame_tag_columns: list of DataFrame columns which are tags, + rest columns will be fields - ``DataFrame`` + :key data_frame_timestamp_column: name of DataFrame column which contains a timestamp. The column can be defined as a :class:`~str` value + formatted as `2018-10-26`, `2018-10-26 12:00`, `2018-10-26 12:00:00-05:00` + or other formats and types supported by `pandas.to_datetime `_ - ``DataFrame`` + :key data_frame_timestamp_timezone: name of the timezone which is used for timestamp column - ``DataFrame`` + :key record_measurement_key: key of record with specified measurement - + ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_measurement_name: static measurement name - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_time_key: key of record with specified timestamp - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_tag_keys: list of record keys to use as a tag - ``dictionary``, ``NamedTuple``, ``dataclass`` + :key record_field_keys: list of record keys to use as a field - ``dictionary``, ``NamedTuple``, ``dataclass`` + :return: ``True`` for successfully accepted data, otherwise raise an exception + + Example: + .. code-block:: python + + # Record as Line Protocol + await write_api.write("my-bucket", "my-org", "h2o_feet,location=us-west level=125i 1") + + # Record as Dictionary + dictionary = { + "measurement": "h2o_feet", + "tags": {"location": "us-west"}, + "fields": {"level": 125}, + "time": 1 + } + await write_api.write("my-bucket", "my-org", dictionary) + + # Record as Point + from influxdb_client import Point + point = Point("h2o_feet").tag("location", "us-west").field("level", 125).time(1) + await write_api.write("my-bucket", "my-org", point) + + DataFrame: + If the ``data_frame_timestamp_column`` is not specified the index of `Pandas DataFrame `_ + is used as a ``timestamp`` for written data. 
The index can be `PeriodIndex `_ + or its must be transformable to ``datetime`` by + `pandas.to_datetime `_. + + If you would like to transform a column to ``PeriodIndex``, you can use something like: + + .. code-block:: python + + import pandas as pd + + # DataFrame + data_frame = ... + # Set column as Index + data_frame.set_index('column_name', inplace=True) + # Transform index to PeriodIndex + data_frame.index = pd.to_datetime(data_frame.index, unit='s') + + """ # noqa: E501 + org = get_org_query_param(org=org, client=self._influxdb_client) + self._append_default_tags(record) + + payloads = defaultdict(list) + self._serialize(record, write_precision, payloads, precision_from_point=False, **kwargs) + + # joint list by \n + body = b'\n'.join(payloads[write_precision]) + response = await self._write_service.post_write_async(org=org, bucket=bucket, body=body, + precision=write_precision, async_req=False, + _return_http_data_only=False, + content_type="text/plain; charset=utf-8") + return response[1] in (201, 204) diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/configuration.py b/myenv/lib/python3.12/site-packages/influxdb_client/configuration.py new file mode 100644 index 0000000..96b1030 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/configuration.py @@ -0,0 +1,283 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import copy +import logging +import multiprocessing +import sys + +import urllib3 + + +class TypeWithDefault(type): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. + """ + + def __init__(cls, name, bases, dct): + """Initialize with defaults.""" + super(TypeWithDefault, cls).__init__(name, bases, dct) + cls._default = None + + def __call__(cls): + """Call self as a function.""" + if cls._default is None: + cls._default = type.__call__(cls) + return copy.copy(cls._default) + + def set_default(cls, default): + """Set dafaults.""" + cls._default = copy.copy(default) + + +class Configuration(object, metaclass=TypeWithDefault): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. + """ + + def __init__(self): + """Initialize configuration.""" + # Default Base url + self.host = "http://localhost/api/v2" + # Temp file folder for downloading files + self.temp_folder_path = None + + # Authentication Settings + # dict to store API key(s) + self.api_key = {} + # dict to store API prefix (e.g. Bearer) + self.api_key_prefix = {} + # Username for HTTP basic authentication + self.username = "" + # Password for HTTP basic authentication + self.password = "" + + # Logging Settings + self.loggers = {} + # Log format + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + # Log stream handler + self.logger_stream_handler = None + # Log file handler + self.logger_file_handler = None + # Debug file location + self.logger_file = None + # Debug switch + self.debug = False + + # SSL/TLS verification + # Set this to false to skip verifying SSL certificate when calling API + # from https server. + self.verify_ssl = True + # Set this to customize the certificate file to verify the peer. 
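Stepping back to WriteApiAsync above: end to end it is driven from a coroutine, and the boolean result reflects the 201/204 status check at the end of write() (a sketch, assuming a reachable server; connection values are placeholders):

import asyncio

from influxdb_client import Point
from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync

async def main():
    async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
        write_api = client.write_api()
        ok = await write_api.write("my-bucket",
                                   record=Point("h2o_feet").field("level", 1.0))
        print(ok)  # True when the server answered 201/204

asyncio.run(main())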
+ self.ssl_ca_cert = None + # client certificate file + self.cert_file = None + # client key file + self.cert_key_file = None + # client key file password + self.cert_key_password = None + # Set this to True/False to enable/disable SSL hostname verification. + self.assert_hostname = None + + # Set this to specify a custom ssl context to inject this context inside the urllib3 connection pool. + self.ssl_context = None + + # urllib3 connection pool's maximum number of connections saved + # per pool. urllib3 uses 1 connection as default value, but this is + # not the best value when you are making a lot of possibly parallel + # requests to the same host, which is often the case here. + # cpu_count * 5 is used as default value to increase performance. + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + # Timeout setting for a request. If one number provided, it will be total request timeout. + # It can also be a pair (tuple) of (connection, read) timeouts. + self.timeout = None + + # Set to True/False to enable basic authentication when using proxied InfluxDB 1.8.x with no auth-enabled + self.auth_basic = False + + # Proxy URL + self.proxy = None + # A dictionary containing headers that will be sent to the proxy + self.proxy_headers = None + # Safe chars for path_param + self.safe_chars_for_path_param = '' + + @property + def logger_file(self): + """Logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """Logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.loggers.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status. + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status. + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for name, logger in self.loggers.items(): + logger.setLevel(logging.DEBUG) + if name == 'influxdb_client.client.http': + # makes sure to do not duplicate stdout handler + if not any(map(lambda h: isinstance(h, logging.StreamHandler) and h.stream == sys.stdout, + logger.handlers)): + logger.addHandler(logging.StreamHandler(sys.stdout)) + # we use 'influxdb_client.client.http' logger instead of this + # httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.loggers.items(): + logger.setLevel(logging.WARNING) + # we use 'influxdb_client.client.http' logger instead of this + # httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """Logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. 
+ :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """Logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Get API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if (self.api_key.get(identifier) and + self.api_key_prefix.get(identifier)): + return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501 + elif self.api_key.get(identifier): + return self.api_key[identifier] + + def get_basic_auth_token(self): + """Get HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + return urllib3.util.make_headers( + basic_auth=self.username + ':' + self.password + ).get('authorization') + + def auth_settings(self): + """Get Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + return { + 'BasicAuthentication': + { + 'type': 'basic', + 'in': 'header', + 'key': 'Authorization', + 'value': self.get_basic_auth_token() + }, + 'TokenAuthentication': + { + 'type': 'api_key', + 'in': 'header', + 'key': 'Authorization', + 'value': self.get_api_key_with_prefix('Authorization') + }, + + } + + def to_debug_report(self): + """Get the essential information for debugging. + + :return: The report for debugging. + """ + from influxdb_client import VERSION + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 2.0.0\n"\ + "SDK Package Version: {client_version}".\ + format(env=sys.platform, pyversion=sys.version, client_version=VERSION) + + def update_request_header_params(self, path: str, params: dict): + """Update header params based on custom settings. + + :param path: Resource path + :param params: Header parameters dict to be updated. + """ + pass + + def update_request_body(self, path: str, body): + """Update http body based on custom settings. + + :param path: Resource path + :param body: Request body to be updated. + :return: Updated body + """ + return body diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__init__.py new file mode 100644 index 0000000..e9a69d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__init__.py @@ -0,0 +1,335 @@ +# coding: utf-8 + +# flake8: noqa +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import models into model package +from influxdb_client.domain.ast_response import ASTResponse +from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody +from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse +from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors +from influxdb_client.domain.array_expression import ArrayExpression +from influxdb_client.domain.authorization import Authorization +from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest +from influxdb_client.domain.authorizations import Authorizations +from influxdb_client.domain.axes import Axes +from influxdb_client.domain.axis import Axis +from influxdb_client.domain.axis_scale import AxisScale +from influxdb_client.domain.bad_statement import BadStatement +from influxdb_client.domain.band_view_properties import BandViewProperties +from influxdb_client.domain.binary_expression import BinaryExpression +from influxdb_client.domain.block import Block +from influxdb_client.domain.boolean_literal import BooleanLiteral +from influxdb_client.domain.bucket import Bucket +from influxdb_client.domain.bucket_links import BucketLinks +from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest +from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules +from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping +from influxdb_client.domain.buckets import Buckets +from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType +from influxdb_client.domain.builder_config import BuilderConfig +from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow +from influxdb_client.domain.builder_functions_type import BuilderFunctionsType +from influxdb_client.domain.builder_tags_type import BuilderTagsType +from influxdb_client.domain.builtin_statement import BuiltinStatement +from influxdb_client.domain.call_expression import CallExpression +from influxdb_client.domain.cell import Cell +from influxdb_client.domain.cell_links import CellLinks +from influxdb_client.domain.cell_update import CellUpdate +from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties +from influxdb_client.domain.check import Check +from influxdb_client.domain.check_base import CheckBase +from influxdb_client.domain.check_base_links import CheckBaseLinks +from influxdb_client.domain.check_discriminator import CheckDiscriminator +from influxdb_client.domain.check_patch import CheckPatch +from influxdb_client.domain.check_status_level import CheckStatusLevel +from influxdb_client.domain.check_view_properties import CheckViewProperties +from influxdb_client.domain.checks import Checks +from influxdb_client.domain.column_data_type import ColumnDataType +from influxdb_client.domain.column_semantic_type import ColumnSemanticType +from influxdb_client.domain.conditional_expression import ConditionalExpression +from influxdb_client.domain.config import Config +from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties +from influxdb_client.domain.create_cell import CreateCell +from influxdb_client.domain.create_dashboard_request import 
CreateDashboardRequest +from influxdb_client.domain.custom_check import CustomCheck +from influxdb_client.domain.dbrp import DBRP +from influxdb_client.domain.dbrp_create import DBRPCreate +from influxdb_client.domain.dbrp_get import DBRPGet +from influxdb_client.domain.dbrp_update import DBRPUpdate +from influxdb_client.domain.dbr_ps import DBRPs +from influxdb_client.domain.dashboard import Dashboard +from influxdb_client.domain.dashboard_color import DashboardColor +from influxdb_client.domain.dashboard_query import DashboardQuery +from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties +from influxdb_client.domain.dashboards import Dashboards +from influxdb_client.domain.date_time_literal import DateTimeLiteral +from influxdb_client.domain.deadman_check import DeadmanCheck +from influxdb_client.domain.decimal_places import DecimalPlaces +from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest +from influxdb_client.domain.dialect import Dialect +from influxdb_client.domain.dict_expression import DictExpression +from influxdb_client.domain.dict_item import DictItem +from influxdb_client.domain.duration import Duration +from influxdb_client.domain.duration_literal import DurationLiteral +from influxdb_client.domain.error import Error +from influxdb_client.domain.expression import Expression +from influxdb_client.domain.expression_statement import ExpressionStatement +from influxdb_client.domain.field import Field +from influxdb_client.domain.file import File +from influxdb_client.domain.float_literal import FloatLiteral +from influxdb_client.domain.flux_response import FluxResponse +from influxdb_client.domain.flux_suggestion import FluxSuggestion +from influxdb_client.domain.flux_suggestions import FluxSuggestions +from influxdb_client.domain.function_expression import FunctionExpression +from influxdb_client.domain.gauge_view_properties import GaugeViewProperties +from influxdb_client.domain.greater_threshold import GreaterThreshold +from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint +from influxdb_client.domain.http_notification_rule import HTTPNotificationRule +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase +from influxdb_client.domain.health_check import HealthCheck +from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties +from influxdb_client.domain.histogram_view_properties import HistogramViewProperties +from influxdb_client.domain.identifier import Identifier +from influxdb_client.domain.import_declaration import ImportDeclaration +from influxdb_client.domain.index_expression import IndexExpression +from influxdb_client.domain.integer_literal import IntegerLiteral +from influxdb_client.domain.is_onboarding import IsOnboarding +from influxdb_client.domain.label import Label +from influxdb_client.domain.label_create_request import LabelCreateRequest +from influxdb_client.domain.label_mapping import LabelMapping +from influxdb_client.domain.label_response import LabelResponse +from influxdb_client.domain.label_update import LabelUpdate +from influxdb_client.domain.labels_response import LabelsResponse +from influxdb_client.domain.language_request import LanguageRequest +from influxdb_client.domain.legacy_authorization_post_request import LegacyAuthorizationPostRequest +from influxdb_client.domain.lesser_threshold import LesserThreshold +from influxdb_client.domain.line_plus_single_stat_properties import 
LinePlusSingleStatProperties
+from influxdb_client.domain.line_protocol_error import LineProtocolError
+from influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError
+from influxdb_client.domain.links import Links
+from influxdb_client.domain.list_stacks_response import ListStacksResponse
+from influxdb_client.domain.log_event import LogEvent
+from influxdb_client.domain.logical_expression import LogicalExpression
+from influxdb_client.domain.logs import Logs
+from influxdb_client.domain.map_variable_properties import MapVariableProperties
+from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties
+from influxdb_client.domain.measurement_schema import MeasurementSchema
+from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn
+from influxdb_client.domain.measurement_schema_create_request import MeasurementSchemaCreateRequest
+from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList
+from influxdb_client.domain.measurement_schema_update_request import MeasurementSchemaUpdateRequest
+from influxdb_client.domain.member_assignment import MemberAssignment
+from influxdb_client.domain.member_expression import MemberExpression
+from influxdb_client.domain.metadata_backup import MetadataBackup
+from influxdb_client.domain.model_property import ModelProperty
+from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties
+from influxdb_client.domain.node import Node
+from influxdb_client.domain.notification_endpoint import NotificationEndpoint
+from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase
+from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks
+from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator
+from influxdb_client.domain.notification_endpoint_type import NotificationEndpointType
+from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate
+from influxdb_client.domain.notification_endpoints import NotificationEndpoints
+from influxdb_client.domain.notification_rule import NotificationRule
+from influxdb_client.domain.notification_rule_base import NotificationRuleBase
+from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks
+from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator
+from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate
+from influxdb_client.domain.notification_rules import NotificationRules
+from influxdb_client.domain.object_expression import ObjectExpression
+from influxdb_client.domain.onboarding_request import OnboardingRequest
+from influxdb_client.domain.onboarding_response import OnboardingResponse
+from influxdb_client.domain.option_statement import OptionStatement
+from influxdb_client.domain.organization import Organization
+from influxdb_client.domain.organization_links import OrganizationLinks
+from influxdb_client.domain.organizations import Organizations
+from influxdb_client.domain.package import Package
+from influxdb_client.domain.package_clause import PackageClause
+from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint
+from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule
+from influxdb_client.domain.pager_duty_notification_rule_base import PagerDutyNotificationRuleBase
+from influxdb_client.domain.paren_expression import ParenExpression
+from influxdb_client.domain.password_reset_body import PasswordResetBody
+from influxdb_client.domain.patch_bucket_request import PatchBucketRequest
+from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest
+from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest
+from influxdb_client.domain.patch_retention_rule import PatchRetentionRule
+from influxdb_client.domain.patch_stack_request import PatchStackRequest
+from influxdb_client.domain.patch_stack_request_additional_resources import PatchStackRequestAdditionalResources
+from influxdb_client.domain.permission import Permission
+from influxdb_client.domain.permission_resource import PermissionResource
+from influxdb_client.domain.pipe_expression import PipeExpression
+from influxdb_client.domain.pipe_literal import PipeLiteral
+from influxdb_client.domain.post_bucket_request import PostBucketRequest
+from influxdb_client.domain.post_check import PostCheck
+from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint
+from influxdb_client.domain.post_notification_rule import PostNotificationRule
+from influxdb_client.domain.post_organization_request import PostOrganizationRequest
+from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse
+from influxdb_client.domain.post_stack_request import PostStackRequest
+from influxdb_client.domain.property_key import PropertyKey
+from influxdb_client.domain.query import Query
+from influxdb_client.domain.query_edit_mode import QueryEditMode
+from influxdb_client.domain.query_variable_properties import QueryVariableProperties
+from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues
+from influxdb_client.domain.range_threshold import RangeThreshold
+from influxdb_client.domain.ready import Ready
+from influxdb_client.domain.regexp_literal import RegexpLiteral
+from influxdb_client.domain.remote_connection import RemoteConnection
+from influxdb_client.domain.remote_connection_creation_request import RemoteConnectionCreationRequest
+from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest
+from influxdb_client.domain.remote_connections import RemoteConnections
+from influxdb_client.domain.renamable_field import RenamableField
+from influxdb_client.domain.replication import Replication
+from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest
+from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest
+from influxdb_client.domain.replications import Replications
+from influxdb_client.domain.resource_member import ResourceMember
+from influxdb_client.domain.resource_members import ResourceMembers
+from influxdb_client.domain.resource_members_links import ResourceMembersLinks
+from influxdb_client.domain.resource_owner import ResourceOwner
+from influxdb_client.domain.resource_owners import ResourceOwners
+from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings
+from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest
+from influxdb_client.domain.return_statement import ReturnStatement
+from influxdb_client.domain.routes import Routes
+from influxdb_client.domain.routes_external import RoutesExternal
+from influxdb_client.domain.routes_query import RoutesQuery
+from influxdb_client.domain.routes_system import RoutesSystem
+from influxdb_client.domain.rule_status_level import RuleStatusLevel
+from influxdb_client.domain.run import Run
+from influxdb_client.domain.run_links import RunLinks
+from influxdb_client.domain.run_manually import RunManually
+from influxdb_client.domain.runs import Runs
+from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule
+from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase
+from influxdb_client.domain.scatter_view_properties import ScatterViewProperties
+from influxdb_client.domain.schema_type import SchemaType
+from influxdb_client.domain.scraper_target_request import ScraperTargetRequest
+from influxdb_client.domain.scraper_target_response import ScraperTargetResponse
+from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses
+from influxdb_client.domain.script import Script
+from influxdb_client.domain.script_create_request import ScriptCreateRequest
+from influxdb_client.domain.script_invocation_params import ScriptInvocationParams
+from influxdb_client.domain.script_language import ScriptLanguage
+from influxdb_client.domain.script_update_request import ScriptUpdateRequest
+from influxdb_client.domain.scripts import Scripts
+from influxdb_client.domain.secret_keys import SecretKeys
+from influxdb_client.domain.secret_keys_response import SecretKeysResponse
+from influxdb_client.domain.shard_group_manifest import ShardGroupManifest
+from influxdb_client.domain.shard_manifest import ShardManifest
+from influxdb_client.domain.shard_owner import ShardOwner
+from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties
+from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties
+from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint
+from influxdb_client.domain.slack_notification_rule import SlackNotificationRule
+from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase
+from influxdb_client.domain.source import Source
+from influxdb_client.domain.source_links import SourceLinks
+from influxdb_client.domain.sources import Sources
+from influxdb_client.domain.stack import Stack
+from influxdb_client.domain.stack_associations import StackAssociations
+from influxdb_client.domain.stack_events import StackEvents
+from influxdb_client.domain.stack_links import StackLinks
+from influxdb_client.domain.stack_resources import StackResources
+from influxdb_client.domain.statement import Statement
+from influxdb_client.domain.static_legend import StaticLegend
+from influxdb_client.domain.status_rule import StatusRule
+from influxdb_client.domain.string_literal import StringLiteral
+from influxdb_client.domain.subscription_manifest import SubscriptionManifest
+from influxdb_client.domain.table_view_properties import TableViewProperties
+from influxdb_client.domain.table_view_properties_table_options import TableViewPropertiesTableOptions
+from influxdb_client.domain.tag_rule import TagRule
+from influxdb_client.domain.task import Task
+from influxdb_client.domain.task_create_request import TaskCreateRequest
+from influxdb_client.domain.task_links import TaskLinks
+from influxdb_client.domain.task_status_type import TaskStatusType
+from influxdb_client.domain.task_update_request import TaskUpdateRequest
+from influxdb_client.domain.tasks import Tasks
+from influxdb_client.domain.telegraf import Telegraf
+from influxdb_client.domain.telegraf_plugin import TelegrafPlugin
+from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest
+from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins
+from influxdb_client.domain.telegraf_plugins import TelegrafPlugins
+from influxdb_client.domain.telegraf_request import TelegrafRequest
+from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata
+from influxdb_client.domain.telegrafs import Telegrafs
+from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint
+from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule
+from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase
+from influxdb_client.domain.template_apply import TemplateApply
+from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes
+from influxdb_client.domain.template_apply_template import TemplateApplyTemplate
+from influxdb_client.domain.template_chart import TemplateChart
+from influxdb_client.domain.template_export_by_id import TemplateExportByID
+from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs
+from influxdb_client.domain.template_export_by_id_resource_filters import TemplateExportByIDResourceFilters
+from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources
+from influxdb_client.domain.template_export_by_name import TemplateExportByName
+from influxdb_client.domain.template_export_by_name_resources import TemplateExportByNameResources
+from influxdb_client.domain.template_kind import TemplateKind
+from influxdb_client.domain.template_summary import TemplateSummary
+from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff
+from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets
+from influxdb_client.domain.template_summary_diff_buckets_new_old import TemplateSummaryDiffBucketsNewOld
+from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks
+from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards
+from influxdb_client.domain.template_summary_diff_dashboards_new_old import TemplateSummaryDiffDashboardsNewOld
+from influxdb_client.domain.template_summary_diff_label_mappings import TemplateSummaryDiffLabelMappings
+from influxdb_client.domain.template_summary_diff_labels import TemplateSummaryDiffLabels
+from influxdb_client.domain.template_summary_diff_labels_new_old import TemplateSummaryDiffLabelsNewOld
+from influxdb_client.domain.template_summary_diff_notification_endpoints import TemplateSummaryDiffNotificationEndpoints
+from influxdb_client.domain.template_summary_diff_notification_rules import TemplateSummaryDiffNotificationRules
+from influxdb_client.domain.template_summary_diff_notification_rules_new_old import TemplateSummaryDiffNotificationRulesNewOld
+from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks
+from influxdb_client.domain.template_summary_diff_tasks_new_old import TemplateSummaryDiffTasksNewOld
+from influxdb_client.domain.template_summary_diff_telegraf_configs import TemplateSummaryDiffTelegrafConfigs
+from influxdb_client.domain.template_summary_diff_variables import TemplateSummaryDiffVariables
+from influxdb_client.domain.template_summary_diff_variables_new_old import TemplateSummaryDiffVariablesNewOld
+from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors
+from influxdb_client.domain.template_summary_label import TemplateSummaryLabel
+from influxdb_client.domain.template_summary_label_properties import TemplateSummaryLabelProperties
+from influxdb_client.domain.template_summary_summary import TemplateSummarySummary
+from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets
+from influxdb_client.domain.template_summary_summary_dashboards import TemplateSummarySummaryDashboards
+from influxdb_client.domain.template_summary_summary_label_mappings import TemplateSummarySummaryLabelMappings
+from influxdb_client.domain.template_summary_summary_notification_rules import TemplateSummarySummaryNotificationRules
+from influxdb_client.domain.template_summary_summary_status_rules import TemplateSummarySummaryStatusRules
+from influxdb_client.domain.template_summary_summary_tag_rules import TemplateSummarySummaryTagRules
+from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks
+from influxdb_client.domain.template_summary_summary_variables import TemplateSummarySummaryVariables
+from influxdb_client.domain.test_statement import TestStatement
+from influxdb_client.domain.threshold import Threshold
+from influxdb_client.domain.threshold_base import ThresholdBase
+from influxdb_client.domain.threshold_check import ThresholdCheck
+from influxdb_client.domain.unary_expression import UnaryExpression
+from influxdb_client.domain.unsigned_integer_literal import UnsignedIntegerLiteral
+from influxdb_client.domain.user import User
+from influxdb_client.domain.user_response import UserResponse
+from influxdb_client.domain.user_response_links import UserResponseLinks
+from influxdb_client.domain.users import Users
+from influxdb_client.domain.variable import Variable
+from influxdb_client.domain.variable_assignment import VariableAssignment
+from influxdb_client.domain.variable_links import VariableLinks
+from influxdb_client.domain.variable_properties import VariableProperties
+from influxdb_client.domain.variables import Variables
+from influxdb_client.domain.view import View
+from influxdb_client.domain.view_links import ViewLinks
+from influxdb_client.domain.view_properties import ViewProperties
+from influxdb_client.domain.views import Views
+from influxdb_client.domain.write_precision import WritePrecision
+from influxdb_client.domain.xy_geom import XYGeom
+from influxdb_client.domain.xy_view_properties import XYViewProperties
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization.cpython-312.pyc
new file mode 100644
index 0000000..1532cb5
Binary files /dev/null and
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization_links.cpython-312.pyc new file mode 100644 index 0000000..6444eef Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organization_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organizations.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organizations.cpython-312.pyc new file mode 100644 index 0000000..1429357 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/organizations.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package.cpython-312.pyc new file mode 100644 index 0000000..f2a90ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package_clause.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package_clause.cpython-312.pyc new file mode 100644 index 0000000..9702ed5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/package_clause.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_endpoint.cpython-312.pyc new file mode 100644 index 0000000..f3130b9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule.cpython-312.pyc new file mode 100644 index 0000000..399f514 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule_base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule_base.cpython-312.pyc new file mode 100644 index 0000000..39212c1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pager_duty_notification_rule_base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/paren_expression.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/paren_expression.cpython-312.pyc new file mode 100644 index 0000000..0b176b5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/paren_expression.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/password_reset_body.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/password_reset_body.cpython-312.pyc new file mode 100644 index 0000000..c880e91 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/password_reset_body.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_bucket_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_bucket_request.cpython-312.pyc new file mode 100644 index 0000000..6ed9538 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_bucket_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_dashboard_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_dashboard_request.cpython-312.pyc new file mode 100644 index 0000000..751e9ee Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_dashboard_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_organization_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_organization_request.cpython-312.pyc new file mode 100644 index 0000000..ecc0813 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_organization_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_retention_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_retention_rule.cpython-312.pyc new file mode 100644 index 0000000..c05ec55 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_retention_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request.cpython-312.pyc new file mode 100644 index 0000000..03a11a0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request_additional_resources.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request_additional_resources.cpython-312.pyc new file mode 100644 index 0000000..689a5ba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/patch_stack_request_additional_resources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission.cpython-312.pyc new file mode 100644 index 0000000..091a838 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission_resource.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission_resource.cpython-312.pyc new file mode 100644 index 0000000..c60a543 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/permission_resource.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_expression.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_expression.cpython-312.pyc new file mode 100644 index 0000000..478d48c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_expression.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_literal.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_literal.cpython-312.pyc new file mode 100644 index 0000000..ea0bcf8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/pipe_literal.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_bucket_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_bucket_request.cpython-312.pyc new file mode 100644 index 0000000..a9c6a22 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_bucket_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_check.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_check.cpython-312.pyc new file mode 100644 index 0000000..36c0528 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_check.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_endpoint.cpython-312.pyc new file mode 100644 index 0000000..46bbe1d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_rule.cpython-312.pyc new file mode 100644 index 0000000..cb8cb47 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_notification_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_organization_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_organization_request.cpython-312.pyc new file mode 100644 index 0000000..9ce4f28 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_organization_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_restore_kv_response.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_restore_kv_response.cpython-312.pyc new file mode 100644 index 0000000..7e57289 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_restore_kv_response.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_stack_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_stack_request.cpython-312.pyc new file mode 100644 index 0000000..3b26dbd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/post_stack_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/property_key.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/property_key.cpython-312.pyc new file mode 100644 index 0000000..29704c2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/property_key.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query.cpython-312.pyc new file mode 100644 index 0000000..dcc2128 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_edit_mode.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_edit_mode.cpython-312.pyc new file mode 100644 index 0000000..141da23 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_edit_mode.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties.cpython-312.pyc new file mode 100644 index 0000000..92994c1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties_values.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties_values.cpython-312.pyc new file mode 100644 index 0000000..28c3610 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/query_variable_properties_values.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/range_threshold.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/range_threshold.cpython-312.pyc new file mode 100644 index 0000000..ada2514 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/range_threshold.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/ready.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/ready.cpython-312.pyc new file mode 100644 index 0000000..560262c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/ready.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/regexp_literal.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/regexp_literal.cpython-312.pyc new file mode 100644 index 0000000..3c7e546 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/regexp_literal.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection.cpython-312.pyc new file mode 100644 index 0000000..6d73a7e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_creation_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_creation_request.cpython-312.pyc new file mode 100644 index 0000000..b7b7f11 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_creation_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_update_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_update_request.cpython-312.pyc new file mode 100644 index 0000000..986421c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connection_update_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connections.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connections.cpython-312.pyc new file mode 100644 index 0000000..93319c6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/remote_connections.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/renamable_field.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/renamable_field.cpython-312.pyc new file mode 100644 index 0000000..9fe33e9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/renamable_field.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication.cpython-312.pyc new file mode 100644 index 0000000..68f9e64 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_creation_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_creation_request.cpython-312.pyc new file mode 100644 index 0000000..3e34c5f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_creation_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_update_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_update_request.cpython-312.pyc new file mode 100644 index 0000000..4f0d55b Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replication_update_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replications.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replications.cpython-312.pyc new file mode 100644 index 0000000..01ba0e3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/replications.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_member.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_member.cpython-312.pyc new file mode 100644 index 0000000..e586cae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_member.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members.cpython-312.pyc new file mode 100644 index 0000000..9e5eb5a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members_links.cpython-312.pyc new file mode 100644 index 0000000..3745bac Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_members_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owner.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owner.cpython-312.pyc new file mode 100644 index 0000000..e2eeafd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owner.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owners.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owners.cpython-312.pyc new file mode 100644 index 0000000..95bf6ed Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/resource_owners.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/restored_bucket_mappings.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/restored_bucket_mappings.cpython-312.pyc new file mode 100644 index 0000000..01a972b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/restored_bucket_mappings.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/retention_policy_manifest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/retention_policy_manifest.cpython-312.pyc new file mode 100644 index 0000000..99f306e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/retention_policy_manifest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/return_statement.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/return_statement.cpython-312.pyc new file mode 100644 index 0000000..ef184ed Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/return_statement.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes.cpython-312.pyc new file mode 100644 index 0000000..fbad061 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_external.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_external.cpython-312.pyc new file mode 100644 index 0000000..5e247d4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_external.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_query.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_query.cpython-312.pyc new file mode 100644 index 0000000..0e7fcec Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_query.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_system.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_system.cpython-312.pyc new file mode 100644 index 0000000..3b0ab0e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/routes_system.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/rule_status_level.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/rule_status_level.cpython-312.pyc new file mode 100644 index 0000000..5de99ec Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/rule_status_level.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run.cpython-312.pyc new file mode 100644 index 0000000..c3169f3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_links.cpython-312.pyc new file mode 100644 index 0000000..2d13dc3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_manually.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_manually.cpython-312.pyc new file mode 100644 index 0000000..c3e4644 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/run_manually.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/runs.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/runs.cpython-312.pyc new file mode 100644 index 0000000..f2c9079 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/runs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scatter_view_properties.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scatter_view_properties.cpython-312.pyc new file mode 100644 index 0000000..75d3d9c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scatter_view_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/schema_type.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/schema_type.cpython-312.pyc new file mode 100644 index 0000000..06670e9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/schema_type.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_request.cpython-312.pyc new file mode 100644 index 0000000..1ecb86b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_response.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_response.cpython-312.pyc new file mode 100644 index 0000000..6eb0e4a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_response.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_responses.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_responses.cpython-312.pyc new file mode 100644 index 0000000..f81f4ed Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scraper_target_responses.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script.cpython-312.pyc new file mode 100644 index 0000000..03314a0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_create_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_create_request.cpython-312.pyc new file mode 100644 index 0000000..5347227 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_create_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_invocation_params.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_invocation_params.cpython-312.pyc new file mode 100644 index 0000000..3df70c5 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_invocation_params.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_language.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_language.cpython-312.pyc new file mode 100644 index 0000000..d802e57 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_language.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_update_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_update_request.cpython-312.pyc new file mode 100644 index 0000000..7b1d093 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/script_update_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scripts.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scripts.cpython-312.pyc new file mode 100644 index 0000000..d28125b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/scripts.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys.cpython-312.pyc new file mode 100644 index 0000000..d8b647e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys_response.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys_response.cpython-312.pyc new file mode 100644 index 0000000..2cde838 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/secret_keys_response.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_group_manifest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_group_manifest.cpython-312.pyc new file mode 100644 index 0000000..1b48a61 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_group_manifest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_manifest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_manifest.cpython-312.pyc new file mode 100644 index 0000000..55c746d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_manifest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_owner.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_owner.cpython-312.pyc new file mode 100644 index 0000000..a7ad7db Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/shard_owner.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/simple_table_view_properties.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/simple_table_view_properties.cpython-312.pyc new file mode 100644 index 0000000..8304eea Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/simple_table_view_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/single_stat_view_properties.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/single_stat_view_properties.cpython-312.pyc new file mode 100644 index 0000000..e6e9473 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/single_stat_view_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_endpoint.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_endpoint.cpython-312.pyc new file mode 100644 index 0000000..24b5c37 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_endpoint.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule.cpython-312.pyc new file mode 100644 index 0000000..5f13ec4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule_base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule_base.cpython-312.pyc new file mode 100644 index 0000000..837948a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/slack_notification_rule_base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule.cpython-312.pyc new file mode 100644 index 0000000..f271e87 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule_base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule_base.cpython-312.pyc new file mode 100644 index 0000000..c2776a5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/smtp_notification_rule_base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source.cpython-312.pyc new file mode 100644 index 0000000..a0cd2b5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source_links.cpython-312.pyc new file mode 100644 index 0000000..a30e7e8 Binary files 
/dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/source_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/sources.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/sources.cpython-312.pyc new file mode 100644 index 0000000..137cd30 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/sources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack.cpython-312.pyc new file mode 100644 index 0000000..f79c43f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_associations.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_associations.cpython-312.pyc new file mode 100644 index 0000000..4d7a404 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_associations.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_events.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_events.cpython-312.pyc new file mode 100644 index 0000000..db2db51 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_events.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_links.cpython-312.pyc new file mode 100644 index 0000000..57a9d3e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_resources.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_resources.cpython-312.pyc new file mode 100644 index 0000000..0355eda Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/stack_resources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/statement.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/statement.cpython-312.pyc new file mode 100644 index 0000000..01a804d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/statement.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/static_legend.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/static_legend.cpython-312.pyc new file mode 100644 index 0000000..8015e06 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/static_legend.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/status_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/status_rule.cpython-312.pyc new file mode 100644 index 0000000..1deed34 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/status_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/string_literal.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/string_literal.cpython-312.pyc new file mode 100644 index 0000000..1917bbb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/string_literal.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/subscription_manifest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/subscription_manifest.cpython-312.pyc new file mode 100644 index 0000000..bf8a16b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/subscription_manifest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties.cpython-312.pyc new file mode 100644 index 0000000..87dfc71 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties_table_options.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties_table_options.cpython-312.pyc new file mode 100644 index 0000000..bc2469a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/table_view_properties_table_options.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tag_rule.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tag_rule.cpython-312.pyc new file mode 100644 index 0000000..41b1f1d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tag_rule.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task.cpython-312.pyc new file mode 100644 index 0000000..66d86db Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_create_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_create_request.cpython-312.pyc new file mode 100644 index 0000000..65c0cba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_create_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_links.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_links.cpython-312.pyc new file mode 100644 index 0000000..a569759 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_links.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_status_type.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_status_type.cpython-312.pyc new file mode 100644 index 0000000..c832642 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_status_type.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_update_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_update_request.cpython-312.pyc new file mode 100644 index 0000000..2de4070 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/task_update_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tasks.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tasks.cpython-312.pyc new file mode 100644 index 0000000..e686961 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/tasks.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf.cpython-312.pyc new file mode 100644 index 0000000..e75cef0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin.cpython-312.pyc new file mode 100644 index 0000000..ef91aed Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request.cpython-312.pyc new file mode 100644 index 0000000..2eeb0df Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request_plugins.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request_plugins.cpython-312.pyc new file mode 100644 index 0000000..2131436 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugin_request_plugins.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugins.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugins.cpython-312.pyc new file mode 100644 index 0000000..1e50f5d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_plugins.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request.cpython-312.pyc new file mode 100644 index 0000000..cef27a9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request_metadata.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request_metadata.cpython-312.pyc new file mode 100644 index 0000000..d4ed20e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/telegraf_request_metadata.cpython-312.pyc differ
[Dozens of further binary-file stanzas omitted: each adds a compiled __pycache__/<module>.cpython-312.pyc bytecode cache under influxdb_client/domain (modules telegrafs through views), following the same pattern: diff --git a/<path> b/<path>, new file mode 100644, index 0000000..<hash>, Binary files /dev/null and b/<path> differ. Compiled bytecode caches carry no reviewable source.]
diff --git
a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/write_precision.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/write_precision.cpython-312.pyc new file mode 100644 index 0000000..f95b7ad Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/write_precision.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_geom.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_geom.cpython-312.pyc new file mode 100644 index 0000000..b2f70e4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_geom.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_view_properties.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_view_properties.cpython-312.pyc new file mode 100644 index 0000000..a7e96b1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/domain/__pycache__/xy_view_properties.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/add_resource_member_request_body.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/add_resource_member_request_body.py new file mode 100644 index 0000000..ffcddd8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/add_resource_member_request_body.py @@ -0,0 +1,139 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class AddResourceMemberRequestBody(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str' + } + + attribute_map = { + 'id': 'id', + 'name': 'name' + } + + def __init__(self, id=None, name=None): # noqa: E501,D401,D403 + """AddResourceMemberRequestBody - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self.discriminator = None + + self.id = id + if name is not None: + self.name = name + + @property + def id(self): + """Get the id of this AddResourceMemberRequestBody. + + The ID of the user to add to the resource. + + :return: The id of this AddResourceMemberRequestBody. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this AddResourceMemberRequestBody. + + The ID of the user to add to the resource. + + :param id: The id of this AddResourceMemberRequestBody. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this AddResourceMemberRequestBody. + + The name of the user to add to the resource. + + :return: The name of this AddResourceMemberRequestBody. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this AddResourceMemberRequestBody. + + The name of the user to add to the resource. + + :param name: The name of this AddResourceMemberRequestBody. + :type: str + """ # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AddResourceMemberRequestBody): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response.py new file mode 100644 index 0000000..ef7d182 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class AnalyzeQueryResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'errors': 'list[AnalyzeQueryResponseErrors]' + } + + attribute_map = { + 'errors': 'errors' + } + + def __init__(self, errors=None): # noqa: E501,D401,D403 + """AnalyzeQueryResponse - a model defined in OpenAPI.""" # noqa: E501 + self._errors = None + self.discriminator = None + + if errors is not None: + self.errors = errors + + @property + def errors(self): + """Get the errors of this AnalyzeQueryResponse. + + :return: The errors of this AnalyzeQueryResponse. + :rtype: list[AnalyzeQueryResponseErrors] + """ # noqa: E501 + return self._errors + + @errors.setter + def errors(self, errors): + """Set the errors of this AnalyzeQueryResponse. + + :param errors: The errors of this AnalyzeQueryResponse. 
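The AddResourceMemberRequestBody model is complete at this point, so a brief usage sketch may help; it is illustrative only, and the member ID below is an invented placeholder:

    from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody

    # `id` is mandatory: the property setter rejects None.
    body = AddResourceMemberRequestBody(id="0313d8a4bdf10000", name="alice")
    print(body.to_dict())  # {'id': '0313d8a4bdf10000', 'name': 'alice'}

    try:
        AddResourceMemberRequestBody()  # id defaults to None
    except ValueError as exc:
        print(exc)  # Invalid value for `id`, must not be `None`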
+ :type: list[AnalyzeQueryResponseErrors] + """ # noqa: E501 + self._errors = errors + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AnalyzeQueryResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response_errors.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response_errors.py new file mode 100644 index 0000000..7a35dc6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/analyze_query_response_errors.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class AnalyzeQueryResponseErrors(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'line': 'int', + 'column': 'int', + 'character': 'int', + 'message': 'str' + } + + attribute_map = { + 'line': 'line', + 'column': 'column', + 'character': 'character', + 'message': 'message' + } + + def __init__(self, line=None, column=None, character=None, message=None): # noqa: E501,D401,D403 + """AnalyzeQueryResponseErrors - a model defined in OpenAPI.""" # noqa: E501 + self._line = None + self._column = None + self._character = None + self._message = None + self.discriminator = None + + if line is not None: + self.line = line + if column is not None: + self.column = column + if character is not None: + self.character = character + if message is not None: + self.message = message + + @property + def line(self): + """Get the line of this AnalyzeQueryResponseErrors. + + :return: The line of this AnalyzeQueryResponseErrors. + :rtype: int + """ # noqa: E501 + return self._line + + @line.setter + def line(self, line): + """Set the line of this AnalyzeQueryResponseErrors. + + :param line: The line of this AnalyzeQueryResponseErrors. + :type: int + """ # noqa: E501 + self._line = line + + @property + def column(self): + """Get the column of this AnalyzeQueryResponseErrors. + + :return: The column of this AnalyzeQueryResponseErrors. 
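AnalyzeQueryResponse is now fully defined; a minimal sketch of how it nests follows. It uses AnalyzeQueryResponseErrors, which this diff adds in the very next file, and the positions and message are invented:

    from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse
    from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors

    err = AnalyzeQueryResponseErrors(line=1, column=12, character=11,
                                     message="undefined identifier")
    resp = AnalyzeQueryResponse(errors=[err])

    # to_dict() recurses into list elements that expose to_dict() themselves.
    print(resp.to_dict())
    # {'errors': [{'line': 1, 'column': 12, 'character': 11,
    #              'message': 'undefined identifier'}]}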
+ :rtype: int + """ # noqa: E501 + return self._column + + @column.setter + def column(self, column): + """Set the column of this AnalyzeQueryResponseErrors. + + :param column: The column of this AnalyzeQueryResponseErrors. + :type: int + """ # noqa: E501 + self._column = column + + @property + def character(self): + """Get the character of this AnalyzeQueryResponseErrors. + + :return: The character of this AnalyzeQueryResponseErrors. + :rtype: int + """ # noqa: E501 + return self._character + + @character.setter + def character(self, character): + """Set the character of this AnalyzeQueryResponseErrors. + + :param character: The character of this AnalyzeQueryResponseErrors. + :type: int + """ # noqa: E501 + self._character = character + + @property + def message(self): + """Get the message of this AnalyzeQueryResponseErrors. + + :return: The message of this AnalyzeQueryResponseErrors. + :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this AnalyzeQueryResponseErrors. + + :param message: The message of this AnalyzeQueryResponseErrors. + :type: str + """ # noqa: E501 + self._message = message + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AnalyzeQueryResponseErrors): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/array_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/array_expression.py new file mode 100644 index 0000000..0c28531 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/array_expression.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class ArrayExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
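With AnalyzeQueryResponseErrors complete, the generated equality methods deserve a quick illustration (values invented): __eq__ compares the instances' __dict__, so equal field values mean equal objects, while a different type never compares equal:

    from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors

    a = AnalyzeQueryResponseErrors(line=3, column=7, message="parse error")
    b = AnalyzeQueryResponseErrors(line=3, column=7, message="parse error")

    print(a == b)        # True: both __dict__ mappings match
    print(a != b)        # False: __ne__ is defined as `not self == other`
    print(a == "text")   # False: the isinstance check fails first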
+ """ + openapi_types = { + 'type': 'str', + 'elements': 'list[Expression]' + } + + attribute_map = { + 'type': 'type', + 'elements': 'elements' + } + + def __init__(self, type=None, elements=None): # noqa: E501,D401,D403 + """ArrayExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._elements = None + self.discriminator = None + + if type is not None: + self.type = type + if elements is not None: + self.elements = elements + + @property + def type(self): + """Get the type of this ArrayExpression. + + Type of AST node + + :return: The type of this ArrayExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ArrayExpression. + + Type of AST node + + :param type: The type of this ArrayExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def elements(self): + """Get the elements of this ArrayExpression. + + Elements of the array + + :return: The elements of this ArrayExpression. + :rtype: list[Expression] + """ # noqa: E501 + return self._elements + + @elements.setter + def elements(self, elements): + """Set the elements of this ArrayExpression. + + Elements of the array + + :param elements: The elements of this ArrayExpression. + :type: list[Expression] + """ # noqa: E501 + self._elements = elements + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ArrayExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/ast_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/ast_response.py new file mode 100644 index 0000000..227cfdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/ast_response.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ASTResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'ast': 'Package' + } + + attribute_map = { + 'ast': 'ast' + } + + def __init__(self, ast=None): # noqa: E501,D401,D403 + """ASTResponse - a model defined in OpenAPI.""" # noqa: E501 + self._ast = None + self.discriminator = None + + if ast is not None: + self.ast = ast + + @property + def ast(self): + """Get the ast of this ASTResponse. + + :return: The ast of this ASTResponse. + :rtype: Package + """ # noqa: E501 + return self._ast + + @ast.setter + def ast(self, ast): + """Set the ast of this ASTResponse. + + :param ast: The ast of this ASTResponse. + :type: Package + """ # noqa: E501 + self._ast = ast + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ASTResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization.py new file mode 100644 index 0000000..67a0bfd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization.py @@ -0,0 +1,350 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + + +class Authorization(AuthorizationUpdateRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'org_id': 'str', + 'permissions': 'list[Permission]', + 'id': 'str', + 'token': 'str', + 'user_id': 'str', + 'user': 'str', + 'org': 'str', + 'links': 'object', + 'status': 'str', + 'description': 'str' + } + + attribute_map = { + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'org_id': 'orgID', + 'permissions': 'permissions', + 'id': 'id', + 'token': 'token', + 'user_id': 'userID', + 'user': 'user', + 'org': 'org', + 'links': 'links', + 'status': 'status', + 'description': 'description' + } + + def __init__(self, created_at=None, updated_at=None, org_id=None, permissions=None, id=None, token=None, user_id=None, user=None, org=None, links=None, status='active', description=None): # noqa: E501,D401,D403 + """Authorization - a model defined in OpenAPI.""" # noqa: E501 + AuthorizationUpdateRequest.__init__(self, status=status, description=description) # noqa: E501 + + self._created_at = None + self._updated_at = None + self._org_id = None + self._permissions = None + self._id = None + self._token = None + self._user_id = None + self._user = None + self._org = None + self._links = None + self.discriminator = None + + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + if org_id is not None: + self.org_id = org_id + if permissions is not None: + self.permissions = permissions + if id is not None: + self.id = id + if token is not None: + self.token = token + if user_id is not None: + self.user_id = user_id + if user is not None: + self.user = user + if org is not None: + self.org = org + if links is not None: + self.links = links + + @property + def created_at(self): + """Get the created_at of this Authorization. + + :return: The created_at of this Authorization. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Authorization. + + :param created_at: The created_at of this Authorization. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Authorization. + + :return: The updated_at of this Authorization. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Authorization. + + :param updated_at: The updated_at of this Authorization. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def org_id(self): + """Get the org_id of this Authorization. + + The organization ID. Specifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that the authorization is scoped to. + + :return: The org_id of this Authorization. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Authorization. + + The organization ID. Specifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that the authorization is scoped to. + + :param org_id: The org_id of this Authorization. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def permissions(self): + """Get the permissions of this Authorization. + + The list of permissions. An authorization must have at least one permission. + + :return: The permissions of this Authorization. 
+ :rtype: list[Permission] + """ # noqa: E501 + return self._permissions + + @permissions.setter + def permissions(self, permissions): + """Set the permissions of this Authorization. + + The list of permissions. An authorization must have at least one permission. + + :param permissions: The permissions of this Authorization. + :type: list[Permission] + """ # noqa: E501 + self._permissions = permissions + + @property + def id(self): + """Get the id of this Authorization. + + The authorization ID. + + :return: The id of this Authorization. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Authorization. + + The authorization ID. + + :param id: The id of this Authorization. + :type: str + """ # noqa: E501 + self._id = id + + @property + def token(self): + """Get the token of this Authorization. + + The API token. The token value is unique to the authorization. [API tokens](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) are used to authenticate and authorize InfluxDB API requests and `influx` CLI commands--after receiving the request, InfluxDB checks that the token is valid and that the `permissions` allow the requested action(s). + + :return: The token of this Authorization. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this Authorization. + + The API token. The token value is unique to the authorization. [API tokens](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) are used to authenticate and authorize InfluxDB API requests and `influx` CLI commands--after receiving the request, InfluxDB checks that the token is valid and that the `permissions` allow the requested action(s). + + :param token: The token of this Authorization. + :type: str + """ # noqa: E501 + self._token = token + + @property + def user_id(self): + """Get the user_id of this Authorization. + + The user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that owns the authorization. If _scoped_, the user that the authorization is scoped to; otherwise, the creator of the authorization. + + :return: The user_id of this Authorization. + :rtype: str + """ # noqa: E501 + return self._user_id + + @user_id.setter + def user_id(self, user_id): + """Set the user_id of this Authorization. + + The user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that owns the authorization. If _scoped_, the user that the authorization is scoped to; otherwise, the creator of the authorization. + + :param user_id: The user_id of this Authorization. + :type: str + """ # noqa: E501 + self._user_id = user_id + + @property + def user(self): + """Get the user of this Authorization. + + The user name. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that owns the authorization. If the authorization is _scoped_ to a user, the user; otherwise, the creator of the authorization. + + :return: The user of this Authorization. + :rtype: str + """ # noqa: E501 + return self._user + + @user.setter + def user(self, user): + """Set the user of this Authorization. + + The user name. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that owns the authorization. If the authorization is _scoped_ to a user, the user; otherwise, the creator of the authorization. + + :param user: The user of this Authorization. 
+ :type: str + """ # noqa: E501 + self._user = user + + @property + def org(self): + """Get the org of this Authorization. + + The organization name. Specifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that the token is scoped to. + + :return: The org of this Authorization. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this Authorization. + + The organization name. Specifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that the token is scoped to. + + :param org: The org of this Authorization. + :type: str + """ # noqa: E501 + self._org = org + + @property + def links(self): + """Get the links of this Authorization. + + :return: The links of this Authorization. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Authorization. + + :param links: The links of this Authorization. + :type: object + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Authorization): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_post_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_post_request.py new file mode 100644 index 0000000..94bcd96 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_post_request.py @@ -0,0 +1,173 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + + +class AuthorizationPostRequest(AuthorizationUpdateRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
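Authorization is complete here; the sketch below assumes the Permission and PermissionResource models defined elsewhere in this package (not reproduced in this diff) accept `action` and `resource`/`type` keyword arguments, and every ID is invented:

    from influxdb_client.domain.authorization import Authorization
    from influxdb_client.domain.permission import Permission                   # assumed API
    from influxdb_client.domain.permission_resource import PermissionResource  # assumed API

    read_buckets = Permission(action="read",
                              resource=PermissionResource(type="buckets"))
    auth = Authorization(org_id="48c88459ee424a04",
                         permissions=[read_buckets],
                         description="read-only token")

    print(auth.status)  # 'active': the default inherited from AuthorizationUpdateRequest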
+ """ + openapi_types = { + 'org_id': 'str', + 'user_id': 'str', + 'permissions': 'list[Permission]', + 'status': 'str', + 'description': 'str' + } + + attribute_map = { + 'org_id': 'orgID', + 'user_id': 'userID', + 'permissions': 'permissions', + 'status': 'status', + 'description': 'description' + } + + def __init__(self, org_id=None, user_id=None, permissions=None, status='active', description=None): # noqa: E501,D401,D403 + """AuthorizationPostRequest - a model defined in OpenAPI.""" # noqa: E501 + AuthorizationUpdateRequest.__init__(self, status=status, description=description) # noqa: E501 + + self._org_id = None + self._user_id = None + self._permissions = None + self.discriminator = None + + if org_id is not None: + self.org_id = org_id + if user_id is not None: + self.user_id = user_id + if permissions is not None: + self.permissions = permissions + + @property + def org_id(self): + """Get the org_id of this AuthorizationPostRequest. + + An organization ID. Specifies the organization that owns the authorization. + + :return: The org_id of this AuthorizationPostRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this AuthorizationPostRequest. + + An organization ID. Specifies the organization that owns the authorization. + + :param org_id: The org_id of this AuthorizationPostRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def user_id(self): + """Get the user_id of this AuthorizationPostRequest. + + A user ID. Specifies the user that the authorization is scoped to. When a user authenticates with username and password, InfluxDB generates a _user session_ with all the permissions specified by all the user's authorizations. + + :return: The user_id of this AuthorizationPostRequest. + :rtype: str + """ # noqa: E501 + return self._user_id + + @user_id.setter + def user_id(self, user_id): + """Set the user_id of this AuthorizationPostRequest. + + A user ID. Specifies the user that the authorization is scoped to. When a user authenticates with username and password, InfluxDB generates a _user session_ with all the permissions specified by all the user's authorizations. + + :param user_id: The user_id of this AuthorizationPostRequest. + :type: str + """ # noqa: E501 + self._user_id = user_id + + @property + def permissions(self): + """Get the permissions of this AuthorizationPostRequest. + + A list of permissions for an authorization. In the list, provide at least one `permission` object. In a `permission`, the `resource.type` property grants access to all resources of the specified type. To grant access to only a specific resource, specify the `resource.id` property. + + :return: The permissions of this AuthorizationPostRequest. + :rtype: list[Permission] + """ # noqa: E501 + return self._permissions + + @permissions.setter + def permissions(self, permissions): + """Set the permissions of this AuthorizationPostRequest. + + A list of permissions for an authorization. In the list, provide at least one `permission` object. In a `permission`, the `resource.type` property grants access to all resources of the specified type. To grant access to only a specific resource, specify the `resource.id` property. + + :param permissions: The permissions of this AuthorizationPostRequest. 
+ :type: list[Permission] + """ # noqa: E501 + self._permissions = permissions + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AuthorizationPostRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_update_request.py new file mode 100644 index 0000000..097d965 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorization_update_request.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class AuthorizationUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'status': 'str', + 'description': 'str' + } + + attribute_map = { + 'status': 'status', + 'description': 'description' + } + + def __init__(self, status='active', description=None): # noqa: E501,D401,D403 + """AuthorizationUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._status = None + self._description = None + self.discriminator = None + + if status is not None: + self.status = status + if description is not None: + self.description = description + + @property + def status(self): + """Get the status of this AuthorizationUpdateRequest. + + Status of the token. If `inactive`, InfluxDB rejects requests that use the token. + + :return: The status of this AuthorizationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this AuthorizationUpdateRequest. + + Status of the token. If `inactive`, InfluxDB rejects requests that use the token. + + :param status: The status of this AuthorizationUpdateRequest. + :type: str + """ # noqa: E501 + self._status = status + + @property + def description(self): + """Get the description of this AuthorizationUpdateRequest. + + A description of the token. + + :return: The description of this AuthorizationUpdateRequest. 
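AuthorizationPostRequest is the request body for creating a token; a minimal sketch follows, reusing the assumed Permission construction from above with invented IDs. Note that to_dict() keys use the Python attribute names (org_id, user_id); the camelCase names in attribute_map (orgID, userID) are presumably applied by the API client when building the JSON payload:

    from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest
    from influxdb_client.domain.permission import Permission                   # assumed API
    from influxdb_client.domain.permission_resource import PermissionResource  # assumed API

    req = AuthorizationPostRequest(
        org_id="48c88459ee424a04",   # invented organization ID
        user_id="0313d8a4bdf10000",  # scope the new token to one user
        permissions=[Permission(action="write",
                                resource=PermissionResource(type="buckets"))],
        description="write-only token",
    )
    print(req.to_dict())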
+ :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this AuthorizationUpdateRequest. + + A description of the token. + + :param description: The description of this AuthorizationUpdateRequest. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AuthorizationUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorizations.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorizations.py new file mode 100644 index 0000000..ad3a4d0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/authorizations.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Authorizations(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'Links', + 'authorizations': 'list[Authorization]' + } + + attribute_map = { + 'links': 'links', + 'authorizations': 'authorizations' + } + + def __init__(self, links=None, authorizations=None): # noqa: E501,D401,D403 + """Authorizations - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._authorizations = None + self.discriminator = None + + if links is not None: + self.links = links + if authorizations is not None: + self.authorizations = authorizations + + @property + def links(self): + """Get the links of this Authorizations. + + :return: The links of this Authorizations. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Authorizations. + + :param links: The links of this Authorizations. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def authorizations(self): + """Get the authorizations of this Authorizations. + + :return: The authorizations of this Authorizations. 
+ :rtype: list[Authorization] + """ # noqa: E501 + return self._authorizations + + @authorizations.setter + def authorizations(self, authorizations): + """Set the authorizations of this Authorizations. + + :param authorizations: The authorizations of this Authorizations. + :type: list[Authorization] + """ # noqa: E501 + self._authorizations = authorizations + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Authorizations): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/axes.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axes.py new file mode 100644 index 0000000..738e3b3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axes.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Axes(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'x': 'Axis', + 'y': 'Axis' + } + + attribute_map = { + 'x': 'x', + 'y': 'y' + } + + def __init__(self, x=None, y=None): # noqa: E501,D401,D403 + """Axes - a model defined in OpenAPI.""" # noqa: E501 + self._x = None + self._y = None + self.discriminator = None + + self.x = x + self.y = y + + @property + def x(self): + """Get the x of this Axes. + + :return: The x of this Axes. + :rtype: Axis + """ # noqa: E501 + return self._x + + @x.setter + def x(self, x): + """Set the x of this Axes. + + :param x: The x of this Axes. + :type: Axis + """ # noqa: E501 + if x is None: + raise ValueError("Invalid value for `x`, must not be `None`") # noqa: E501 + self._x = x + + @property + def y(self): + """Get the y of this Axes. + + :return: The y of this Axes. + :rtype: Axis + """ # noqa: E501 + return self._y + + @y.setter + def y(self, y): + """Set the y of this Axes. + + :param y: The y of this Axes. 
+ :type: Axis + """ # noqa: E501 + if y is None: + raise ValueError("Invalid value for `y`, must not be `None`") # noqa: E501 + self._y = y + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Axes): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis.py new file mode 100644 index 0000000..4442075 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis.py @@ -0,0 +1,242 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Axis(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'bounds': 'list[str]', + 'label': 'str', + 'prefix': 'str', + 'suffix': 'str', + 'base': 'str', + 'scale': 'AxisScale' + } + + attribute_map = { + 'bounds': 'bounds', + 'label': 'label', + 'prefix': 'prefix', + 'suffix': 'suffix', + 'base': 'base', + 'scale': 'scale' + } + + def __init__(self, bounds=None, label=None, prefix=None, suffix=None, base=None, scale=None): # noqa: E501,D401,D403 + """Axis - a model defined in OpenAPI.""" # noqa: E501 + self._bounds = None + self._label = None + self._prefix = None + self._suffix = None + self._base = None + self._scale = None + self.discriminator = None + + if bounds is not None: + self.bounds = bounds + if label is not None: + self.label = label + if prefix is not None: + self.prefix = prefix + if suffix is not None: + self.suffix = suffix + if base is not None: + self.base = base + if scale is not None: + self.scale = scale + + @property + def bounds(self): + """Get the bounds of this Axis. + + The extents of the axis in the form [lower, upper]. Clients determine whether bounds are inclusive or exclusive of their limits. + + :return: The bounds of this Axis. + :rtype: list[str] + """ # noqa: E501 + return self._bounds + + @bounds.setter + def bounds(self, bounds): + """Set the bounds of this Axis. + + The extents of the axis in the form [lower, upper]. 
Clients determine whether bounds are inclusive or exclusive of their limits. + + :param bounds: The bounds of this Axis. + :type: list[str] + """ # noqa: E501 + self._bounds = bounds + + @property + def label(self): + """Get the label of this Axis. + + Description of the axis. + + :return: The label of this Axis. + :rtype: str + """ # noqa: E501 + return self._label + + @label.setter + def label(self, label): + """Set the label of this Axis. + + Description of the axis. + + :param label: The label of this Axis. + :type: str + """ # noqa: E501 + self._label = label + + @property + def prefix(self): + """Get the prefix of this Axis. + + Label prefix for formatting axis values. + + :return: The prefix of this Axis. + :rtype: str + """ # noqa: E501 + return self._prefix + + @prefix.setter + def prefix(self, prefix): + """Set the prefix of this Axis. + + Label prefix for formatting axis values. + + :param prefix: The prefix of this Axis. + :type: str + """ # noqa: E501 + self._prefix = prefix + + @property + def suffix(self): + """Get the suffix of this Axis. + + Label suffix for formatting axis values. + + :return: The suffix of this Axis. + :rtype: str + """ # noqa: E501 + return self._suffix + + @suffix.setter + def suffix(self, suffix): + """Set the suffix of this Axis. + + Label suffix for formatting axis values. + + :param suffix: The suffix of this Axis. + :type: str + """ # noqa: E501 + self._suffix = suffix + + @property + def base(self): + """Get the base of this Axis. + + Radix for formatting axis values. + + :return: The base of this Axis. + :rtype: str + """ # noqa: E501 + return self._base + + @base.setter + def base(self, base): + """Set the base of this Axis. + + Radix for formatting axis values. + + :param base: The base of this Axis. + :type: str + """ # noqa: E501 + self._base = base + + @property + def scale(self): + """Get the scale of this Axis. + + :return: The scale of this Axis. + :rtype: AxisScale + """ # noqa: E501 + return self._scale + + @scale.setter + def scale(self, scale): + """Set the scale of this Axis. + + :param scale: The scale of this Axis. 
+ :type: AxisScale + """ # noqa: E501 + self._scale = scale + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Axis): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis_scale.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis_scale.py new file mode 100644 index 0000000..a678fef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/axis_scale.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class AxisScale(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + LOG = "log" + LINEAR = "linear" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """AxisScale - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, AxisScale): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bad_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bad_statement.py new file mode 100644 index 0000000..d1f455e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bad_statement.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class BadStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'text': 'str' + } + + attribute_map = { + 'type': 'type', + 'text': 'text' + } + + def __init__(self, type=None, text=None): # noqa: E501,D401,D403 + """BadStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._text = None + self.discriminator = None + + if type is not None: + self.type = type + if text is not None: + self.text = text + + @property + def type(self): + """Get the type of this BadStatement. + + Type of AST node + + :return: The type of this BadStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BadStatement. + + Type of AST node + + :param type: The type of this BadStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def text(self): + """Get the text of this BadStatement. + + Raw source text + + :return: The text of this BadStatement. + :rtype: str + """ # noqa: E501 + return self._text + + @text.setter + def text(self, text): + """Set the text of this BadStatement. + + Raw source text + + :param text: The text of this BadStatement. 
+ :type: str + """ # noqa: E501 + self._text = text + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BadStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/band_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/band_view_properties.py new file mode 100644 index 0000000..1b03b7b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/band_view_properties.py @@ -0,0 +1,771 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class BandViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'axes': 'Axes', + 'static_legend': 'StaticLegend', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_column': 'str', + 'generate_y_axis_ticks': 'list[str]', + 'y_total_ticks': 'int', + 'y_tick_start': 'float', + 'y_tick_step': 'float', + 'upper_column': 'str', + 'main_column': 'str', + 'lower_column': 'str', + 'hover_dimension': 'str', + 'geom': 'XYGeom', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'axes': 'axes', + 'static_legend': 'staticLegend', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_column': 'yColumn', + 'generate_y_axis_ticks': 'generateYAxisTicks', + 'y_total_ticks': 'yTotalTicks', + 'y_tick_start': 'yTickStart', + 'y_tick_step': 'yTickStep', + 'upper_column': 'upperColumn', + 'main_column': 'mainColumn', + 'lower_column': 'lowerColumn', + 'hover_dimension': 'hoverDimension', + 'geom': 'geom', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, time_format=None, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, axes=None, static_legend=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_column=None, generate_y_axis_ticks=None, y_total_ticks=None, y_tick_start=None, y_tick_step=None, upper_column=None, main_column=None, lower_column=None, hover_dimension=None, geom=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """BandViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._axes = None + self._static_legend = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_column = None + self._generate_y_axis_ticks = None + self._y_total_ticks = None + self._y_tick_start = None + self._y_tick_step = None + self._upper_column = None + self._main_column = None + self._lower_column = None + self._hover_dimension = None + self._geom = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide = adaptive_zoom_hide + if time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries 
+ self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.axes = axes + if static_legend is not None: + self.static_legend = static_legend + if x_column is not None: + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + if y_column is not None: + self.y_column = y_column + if generate_y_axis_ticks is not None: + self.generate_y_axis_ticks = generate_y_axis_ticks + if y_total_ticks is not None: + self.y_total_ticks = y_total_ticks + if y_tick_start is not None: + self.y_tick_start = y_tick_start + if y_tick_step is not None: + self.y_tick_step = y_tick_step + if upper_column is not None: + self.upper_column = upper_column + if main_column is not None: + self.main_column = main_column + if lower_column is not None: + self.lower_column = lower_column + if hover_dimension is not None: + self.hover_dimension = hover_dimension + self.geom = geom + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this BandViewProperties. + + :return: The adaptive_zoom_hide of this BandViewProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this BandViewProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this BandViewProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def time_format(self): + """Get the time_format of this BandViewProperties. + + :return: The time_format of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this BandViewProperties. + + :param time_format: The time_format of this BandViewProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this BandViewProperties. + + :return: The type of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BandViewProperties. + + :param type: The type of this BandViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this BandViewProperties. + + :return: The queries of this BandViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this BandViewProperties. + + :param queries: The queries of this BandViewProperties. 
+ :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this BandViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this BandViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this BandViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this BandViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this BandViewProperties. + + :return: The shape of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this BandViewProperties. + + :param shape: The shape of this BandViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this BandViewProperties. + + :return: The note of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this BandViewProperties. + + :param note: The note of this BandViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this BandViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this BandViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this BandViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this BandViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def axes(self): + """Get the axes of this BandViewProperties. + + :return: The axes of this BandViewProperties. + :rtype: Axes + """ # noqa: E501 + return self._axes + + @axes.setter + def axes(self, axes): + """Set the axes of this BandViewProperties. + + :param axes: The axes of this BandViewProperties. + :type: Axes + """ # noqa: E501 + if axes is None: + raise ValueError("Invalid value for `axes`, must not be `None`") # noqa: E501 + self._axes = axes + + @property + def static_legend(self): + """Get the static_legend of this BandViewProperties. + + :return: The static_legend of this BandViewProperties. + :rtype: StaticLegend + """ # noqa: E501 + return self._static_legend + + @static_legend.setter + def static_legend(self, static_legend): + """Set the static_legend of this BandViewProperties. + + :param static_legend: The static_legend of this BandViewProperties. 
+ :type: StaticLegend + """ # noqa: E501 + self._static_legend = static_legend + + @property + def x_column(self): + """Get the x_column of this BandViewProperties. + + :return: The x_column of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this BandViewProperties. + + :param x_column: The x_column of this BandViewProperties. + :type: str + """ # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this BandViewProperties. + + :return: The generate_x_axis_ticks of this BandViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this BandViewProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this BandViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this BandViewProperties. + + :return: The x_total_ticks of this BandViewProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this BandViewProperties. + + :param x_total_ticks: The x_total_ticks of this BandViewProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this BandViewProperties. + + :return: The x_tick_start of this BandViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this BandViewProperties. + + :param x_tick_start: The x_tick_start of this BandViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this BandViewProperties. + + :return: The x_tick_step of this BandViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this BandViewProperties. + + :param x_tick_step: The x_tick_step of this BandViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_column(self): + """Get the y_column of this BandViewProperties. + + :return: The y_column of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_column + + @y_column.setter + def y_column(self, y_column): + """Set the y_column of this BandViewProperties. + + :param y_column: The y_column of this BandViewProperties. + :type: str + """ # noqa: E501 + self._y_column = y_column + + @property + def generate_y_axis_ticks(self): + """Get the generate_y_axis_ticks of this BandViewProperties. + + :return: The generate_y_axis_ticks of this BandViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_y_axis_ticks + + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks): + """Set the generate_y_axis_ticks of this BandViewProperties. + + :param generate_y_axis_ticks: The generate_y_axis_ticks of this BandViewProperties. 
+ :type: list[str] + """ # noqa: E501 + self._generate_y_axis_ticks = generate_y_axis_ticks + + @property + def y_total_ticks(self): + """Get the y_total_ticks of this BandViewProperties. + + :return: The y_total_ticks of this BandViewProperties. + :rtype: int + """ # noqa: E501 + return self._y_total_ticks + + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks): + """Set the y_total_ticks of this BandViewProperties. + + :param y_total_ticks: The y_total_ticks of this BandViewProperties. + :type: int + """ # noqa: E501 + self._y_total_ticks = y_total_ticks + + @property + def y_tick_start(self): + """Get the y_tick_start of this BandViewProperties. + + :return: The y_tick_start of this BandViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_start + + @y_tick_start.setter + def y_tick_start(self, y_tick_start): + """Set the y_tick_start of this BandViewProperties. + + :param y_tick_start: The y_tick_start of this BandViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_start = y_tick_start + + @property + def y_tick_step(self): + """Get the y_tick_step of this BandViewProperties. + + :return: The y_tick_step of this BandViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_step + + @y_tick_step.setter + def y_tick_step(self, y_tick_step): + """Set the y_tick_step of this BandViewProperties. + + :param y_tick_step: The y_tick_step of this BandViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_step = y_tick_step + + @property + def upper_column(self): + """Get the upper_column of this BandViewProperties. + + :return: The upper_column of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._upper_column + + @upper_column.setter + def upper_column(self, upper_column): + """Set the upper_column of this BandViewProperties. + + :param upper_column: The upper_column of this BandViewProperties. + :type: str + """ # noqa: E501 + self._upper_column = upper_column + + @property + def main_column(self): + """Get the main_column of this BandViewProperties. + + :return: The main_column of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._main_column + + @main_column.setter + def main_column(self, main_column): + """Set the main_column of this BandViewProperties. + + :param main_column: The main_column of this BandViewProperties. + :type: str + """ # noqa: E501 + self._main_column = main_column + + @property + def lower_column(self): + """Get the lower_column of this BandViewProperties. + + :return: The lower_column of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._lower_column + + @lower_column.setter + def lower_column(self, lower_column): + """Set the lower_column of this BandViewProperties. + + :param lower_column: The lower_column of this BandViewProperties. + :type: str + """ # noqa: E501 + self._lower_column = lower_column + + @property + def hover_dimension(self): + """Get the hover_dimension of this BandViewProperties. + + :return: The hover_dimension of this BandViewProperties. + :rtype: str + """ # noqa: E501 + return self._hover_dimension + + @hover_dimension.setter + def hover_dimension(self, hover_dimension): + """Set the hover_dimension of this BandViewProperties. + + :param hover_dimension: The hover_dimension of this BandViewProperties. + :type: str + """ # noqa: E501 + self._hover_dimension = hover_dimension + + @property + def geom(self): + """Get the geom of this BandViewProperties. + + :return: The geom of this BandViewProperties. 
+ :rtype: XYGeom + """ # noqa: E501 + return self._geom + + @geom.setter + def geom(self, geom): + """Set the geom of this BandViewProperties. + + :param geom: The geom of this BandViewProperties. + :type: XYGeom + """ # noqa: E501 + if geom is None: + raise ValueError("Invalid value for `geom`, must not be `None`") # noqa: E501 + self._geom = geom + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this BandViewProperties. + + :return: The legend_colorize_rows of this BandViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this BandViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this BandViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this BandViewProperties. + + :return: The legend_hide of this BandViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this BandViewProperties. + + :param legend_hide: The legend_hide of this BandViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this BandViewProperties. + + :return: The legend_opacity of this BandViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this BandViewProperties. + + :param legend_opacity: The legend_opacity of this BandViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this BandViewProperties. + + :return: The legend_orientation_threshold of this BandViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this BandViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this BandViewProperties. 
+ :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BandViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/binary_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/binary_expression.py new file mode 100644 index 0000000..858d4e1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/binary_expression.py @@ -0,0 +1,184 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class BinaryExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'operator': 'str', + 'left': 'Expression', + 'right': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'operator': 'operator', + 'left': 'left', + 'right': 'right' + } + + def __init__(self, type=None, operator=None, left=None, right=None): # noqa: E501,D401,D403 + """BinaryExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._operator = None + self._left = None + self._right = None + self.discriminator = None + + if type is not None: + self.type = type + if operator is not None: + self.operator = operator + if left is not None: + self.left = left + if right is not None: + self.right = right + + @property + def type(self): + """Get the type of this BinaryExpression. + + Type of AST node + + :return: The type of this BinaryExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BinaryExpression. + + Type of AST node + + :param type: The type of this BinaryExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def operator(self): + """Get the operator of this BinaryExpression. + + :return: The operator of this BinaryExpression. 
+ :rtype: str + """ # noqa: E501 + return self._operator + + @operator.setter + def operator(self, operator): + """Set the operator of this BinaryExpression. + + :param operator: The operator of this BinaryExpression. + :type: str + """ # noqa: E501 + self._operator = operator + + @property + def left(self): + """Get the left of this BinaryExpression. + + :return: The left of this BinaryExpression. + :rtype: Expression + """ # noqa: E501 + return self._left + + @left.setter + def left(self, left): + """Set the left of this BinaryExpression. + + :param left: The left of this BinaryExpression. + :type: Expression + """ # noqa: E501 + self._left = left + + @property + def right(self): + """Get the right of this BinaryExpression. + + :return: The right of this BinaryExpression. + :rtype: Expression + """ # noqa: E501 + return self._right + + @right.setter + def right(self, right): + """Set the right of this BinaryExpression. + + :param right: The right of this BinaryExpression. + :type: Expression + """ # noqa: E501 + self._right = right + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BinaryExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/block.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/block.py new file mode 100644 index 0000000..b2ddc36 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/block.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.node import Node + + +class Block(Node): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'body': 'list[Statement]' + } + + attribute_map = { + 'type': 'type', + 'body': 'body' + } + + def __init__(self, type=None, body=None): # noqa: E501,D401,D403 + """Block - a model defined in OpenAPI.""" # noqa: E501 + Node.__init__(self) # noqa: E501 + + self._type = None + self._body = None + self.discriminator = None + + if type is not None: + self.type = type + if body is not None: + self.body = body + + @property + def type(self): + """Get the type of this Block. + + Type of AST node + + :return: The type of this Block. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Block. + + Type of AST node + + :param type: The type of this Block. + :type: str + """ # noqa: E501 + self._type = type + + @property + def body(self): + """Get the body of this Block. + + Block body + + :return: The body of this Block. + :rtype: list[Statement] + """ # noqa: E501 + return self._body + + @body.setter + def body(self, body): + """Set the body of this Block. + + Block body + + :param body: The body of this Block. + :type: list[Statement] + """ # noqa: E501 + self._body = body + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Block): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/boolean_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/boolean_literal.py new file mode 100644 index 0000000..693d7be --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/boolean_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class BooleanLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'value': 'bool' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """BooleanLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this BooleanLiteral. + + Type of AST node + + :return: The type of this BooleanLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BooleanLiteral. + + Type of AST node + + :param type: The type of this BooleanLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this BooleanLiteral. + + :return: The value of this BooleanLiteral. + :rtype: bool + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this BooleanLiteral. + + :param value: The value of this BooleanLiteral. + :type: bool + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BooleanLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket.py new file mode 100644 index 0000000..fe7d3d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket.py @@ -0,0 +1,366 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Bucket(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'links': 'BucketLinks', + 'id': 'str', + 'type': 'str', + 'name': 'str', + 'description': 'str', + 'org_id': 'str', + 'rp': 'str', + 'schema_type': 'SchemaType', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'retention_rules': 'list[BucketRetentionRules]', + 'labels': 'list[Label]' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'type': 'type', + 'name': 'name', + 'description': 'description', + 'org_id': 'orgID', + 'rp': 'rp', + 'schema_type': 'schemaType', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'retention_rules': 'retentionRules', + 'labels': 'labels' + } + + def __init__(self, links=None, id=None, type='user', name=None, description=None, org_id=None, rp=None, schema_type=None, created_at=None, updated_at=None, retention_rules=None, labels=None): # noqa: E501,D401,D403 + """Bucket - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._id = None + self._type = None + self._name = None + self._description = None + self._org_id = None + self._rp = None + self._schema_type = None + self._created_at = None + self._updated_at = None + self._retention_rules = None + self._labels = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + if type is not None: + self.type = type + self.name = name + if description is not None: + self.description = description + if org_id is not None: + self.org_id = org_id + if rp is not None: + self.rp = rp + if schema_type is not None: + self.schema_type = schema_type + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + self.retention_rules = retention_rules + if labels is not None: + self.labels = labels + + @property + def links(self): + """Get the links of this Bucket. + + :return: The links of this Bucket. + :rtype: BucketLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Bucket. + + :param links: The links of this Bucket. + :type: BucketLinks + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this Bucket. + + :return: The id of this Bucket. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Bucket. + + :param id: The id of this Bucket. + :type: str + """ # noqa: E501 + self._id = id + + @property + def type(self): + """Get the type of this Bucket. + + :return: The type of this Bucket. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Bucket. + + :param type: The type of this Bucket. + :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this Bucket. + + :return: The name of this Bucket. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Bucket. + + :param name: The name of this Bucket. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this Bucket. + + :return: The description of this Bucket. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Bucket. + + :param description: The description of this Bucket. 
+ :type: str + """ # noqa: E501 + self._description = description + + @property + def org_id(self): + """Get the org_id of this Bucket. + + :return: The org_id of this Bucket. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Bucket. + + :param org_id: The org_id of this Bucket. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def rp(self): + """Get the rp of this Bucket. + + :return: The rp of this Bucket. + :rtype: str + """ # noqa: E501 + return self._rp + + @rp.setter + def rp(self, rp): + """Set the rp of this Bucket. + + :param rp: The rp of this Bucket. + :type: str + """ # noqa: E501 + self._rp = rp + + @property + def schema_type(self): + """Get the schema_type of this Bucket. + + :return: The schema_type of this Bucket. + :rtype: SchemaType + """ # noqa: E501 + return self._schema_type + + @schema_type.setter + def schema_type(self, schema_type): + """Set the schema_type of this Bucket. + + :param schema_type: The schema_type of this Bucket. + :type: SchemaType + """ # noqa: E501 + self._schema_type = schema_type + + @property + def created_at(self): + """Get the created_at of this Bucket. + + :return: The created_at of this Bucket. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Bucket. + + :param created_at: The created_at of this Bucket. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Bucket. + + :return: The updated_at of this Bucket. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Bucket. + + :param updated_at: The updated_at of this Bucket. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def retention_rules(self): + """Get the retention_rules of this Bucket. + + Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required. + + :return: The retention_rules of this Bucket. + :rtype: list[BucketRetentionRules] + """ # noqa: E501 + return self._retention_rules + + @retention_rules.setter + def retention_rules(self, retention_rules): + """Set the retention_rules of this Bucket. + + Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required. + + :param retention_rules: The retention_rules of this Bucket. + :type: list[BucketRetentionRules] + """ # noqa: E501 + if retention_rules is None: + raise ValueError("Invalid value for `retention_rules`, must not be `None`") # noqa: E501 + self._retention_rules = retention_rules + + @property + def labels(self): + """Get the labels of this Bucket. + + :return: The labels of this Bucket. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this Bucket. + + :param labels: The labels of this Bucket. 
+ :type: list[Label] + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Bucket): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_links.py new file mode 100644 index 0000000..a4cef1a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_links.py @@ -0,0 +1,246 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BucketLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'labels': 'str', + 'members': 'str', + 'org': 'str', + 'owners': 'str', + '_self': 'str', + 'write': 'str' + } + + attribute_map = { + 'labels': 'labels', + 'members': 'members', + 'org': 'org', + 'owners': 'owners', + '_self': 'self', + 'write': 'write' + } + + def __init__(self, labels=None, members=None, org=None, owners=None, _self=None, write=None): # noqa: E501,D401,D403 + """BucketLinks - a model defined in OpenAPI.""" # noqa: E501 + self._labels = None + self._members = None + self._org = None + self._owners = None + self.__self = None + self._write = None + self.discriminator = None + + if labels is not None: + self.labels = labels + if members is not None: + self.members = members + if org is not None: + self.org = org + if owners is not None: + self.owners = owners + if _self is not None: + self._self = _self + if write is not None: + self.write = write + + @property + def labels(self): + """Get the labels of this BucketLinks. + + URI of resource. + + :return: The labels of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this BucketLinks. + + URI of resource. + + :param labels: The labels of this BucketLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + @property + def members(self): + """Get the members of this BucketLinks. 
+ + URI of resource. + + :return: The members of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this BucketLinks. + + URI of resource. + + :param members: The members of this BucketLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def org(self): + """Get the org of this BucketLinks. + + URI of resource. + + :return: The org of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this BucketLinks. + + URI of resource. + + :param org: The org of this BucketLinks. + :type: str + """ # noqa: E501 + self._org = org + + @property + def owners(self): + """Get the owners of this BucketLinks. + + URI of resource. + + :return: The owners of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this BucketLinks. + + URI of resource. + + :param owners: The owners of this BucketLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + @property + def _self(self): + """Get the _self of this BucketLinks. + + URI of resource. + + :return: The _self of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this BucketLinks. + + URI of resource. + + :param _self: The _self of this BucketLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def write(self): + """Get the write of this BucketLinks. + + URI of resource. + + :return: The write of this BucketLinks. + :rtype: str + """ # noqa: E501 + return self._write + + @write.setter + def write(self, write): + """Set the write of this BucketLinks. + + URI of resource. + + :param write: The write of this BucketLinks. + :type: str + """ # noqa: E501 + self._write = write + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BucketLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_metadata_manifest.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_metadata_manifest.py new file mode 100644 index 0000000..c87ec20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_metadata_manifest.py @@ -0,0 +1,251 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BucketMetadataManifest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'organization_id': 'str', + 'organization_name': 'str', + 'bucket_id': 'str', + 'bucket_name': 'str', + 'description': 'str', + 'default_retention_policy': 'str', + 'retention_policies': 'list[RetentionPolicyManifest]' + } + + attribute_map = { + 'organization_id': 'organizationID', + 'organization_name': 'organizationName', + 'bucket_id': 'bucketID', + 'bucket_name': 'bucketName', + 'description': 'description', + 'default_retention_policy': 'defaultRetentionPolicy', + 'retention_policies': 'retentionPolicies' + } + + def __init__(self, organization_id=None, organization_name=None, bucket_id=None, bucket_name=None, description=None, default_retention_policy=None, retention_policies=None): # noqa: E501,D401,D403 + """BucketMetadataManifest - a model defined in OpenAPI.""" # noqa: E501 + self._organization_id = None + self._organization_name = None + self._bucket_id = None + self._bucket_name = None + self._description = None + self._default_retention_policy = None + self._retention_policies = None + self.discriminator = None + + self.organization_id = organization_id + self.organization_name = organization_name + self.bucket_id = bucket_id + self.bucket_name = bucket_name + if description is not None: + self.description = description + self.default_retention_policy = default_retention_policy + self.retention_policies = retention_policies + + @property + def organization_id(self): + """Get the organization_id of this BucketMetadataManifest. + + :return: The organization_id of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._organization_id + + @organization_id.setter + def organization_id(self, organization_id): + """Set the organization_id of this BucketMetadataManifest. + + :param organization_id: The organization_id of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + if organization_id is None: + raise ValueError("Invalid value for `organization_id`, must not be `None`") # noqa: E501 + self._organization_id = organization_id + + @property + def organization_name(self): + """Get the organization_name of this BucketMetadataManifest. + + :return: The organization_name of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._organization_name + + @organization_name.setter + def organization_name(self, organization_name): + """Set the organization_name of this BucketMetadataManifest. + + :param organization_name: The organization_name of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + if organization_name is None: + raise ValueError("Invalid value for `organization_name`, must not be `None`") # noqa: E501 + self._organization_name = organization_name + + @property + def bucket_id(self): + """Get the bucket_id of this BucketMetadataManifest. + + :return: The bucket_id of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._bucket_id + + @bucket_id.setter + def bucket_id(self, bucket_id): + """Set the bucket_id of this BucketMetadataManifest. 
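+
+ Note: `bucket_id` is a required field; the setter below rejects None
+ with a ValueError.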
+ + :param bucket_id: The bucket_id of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + if bucket_id is None: + raise ValueError("Invalid value for `bucket_id`, must not be `None`") # noqa: E501 + self._bucket_id = bucket_id + + @property + def bucket_name(self): + """Get the bucket_name of this BucketMetadataManifest. + + :return: The bucket_name of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._bucket_name + + @bucket_name.setter + def bucket_name(self, bucket_name): + """Set the bucket_name of this BucketMetadataManifest. + + :param bucket_name: The bucket_name of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + if bucket_name is None: + raise ValueError("Invalid value for `bucket_name`, must not be `None`") # noqa: E501 + self._bucket_name = bucket_name + + @property + def description(self): + """Get the description of this BucketMetadataManifest. + + :return: The description of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this BucketMetadataManifest. + + :param description: The description of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def default_retention_policy(self): + """Get the default_retention_policy of this BucketMetadataManifest. + + :return: The default_retention_policy of this BucketMetadataManifest. + :rtype: str + """ # noqa: E501 + return self._default_retention_policy + + @default_retention_policy.setter + def default_retention_policy(self, default_retention_policy): + """Set the default_retention_policy of this BucketMetadataManifest. + + :param default_retention_policy: The default_retention_policy of this BucketMetadataManifest. + :type: str + """ # noqa: E501 + if default_retention_policy is None: + raise ValueError("Invalid value for `default_retention_policy`, must not be `None`") # noqa: E501 + self._default_retention_policy = default_retention_policy + + @property + def retention_policies(self): + """Get the retention_policies of this BucketMetadataManifest. + + :return: The retention_policies of this BucketMetadataManifest. + :rtype: list[RetentionPolicyManifest] + """ # noqa: E501 + return self._retention_policies + + @retention_policies.setter + def retention_policies(self, retention_policies): + """Set the retention_policies of this BucketMetadataManifest. + + :param retention_policies: The retention_policies of this BucketMetadataManifest. 
+ :type: list[RetentionPolicyManifest] + """ # noqa: E501 + if retention_policies is None: + raise ValueError("Invalid value for `retention_policies`, must not be `None`") # noqa: E501 + self._retention_policies = retention_policies + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BucketMetadataManifest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_retention_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_retention_rules.py new file mode 100644 index 0000000..2359eb7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_retention_rules.py @@ -0,0 +1,164 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BucketRetentionRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'every_seconds': 'int', + 'shard_group_duration_seconds': 'int' + } + + attribute_map = { + 'type': 'type', + 'every_seconds': 'everySeconds', + 'shard_group_duration_seconds': 'shardGroupDurationSeconds' + } + + def __init__(self, type='expire', every_seconds=2592000, shard_group_duration_seconds=None): # noqa: E501,D401,D403 + """BucketRetentionRules - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._every_seconds = None + self._shard_group_duration_seconds = None + self.discriminator = None + + if type is not None: + self.type = type + self.every_seconds = every_seconds + if shard_group_duration_seconds is not None: + self.shard_group_duration_seconds = shard_group_duration_seconds + + @property + def type(self): + """Get the type of this BucketRetentionRules. + + :return: The type of this BucketRetentionRules. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BucketRetentionRules. + + :param type: The type of this BucketRetentionRules. 
+ :type: str + """ # noqa: E501 + self._type = type + + @property + def every_seconds(self): + """Get the every_seconds of this BucketRetentionRules. + + The duration in seconds for how long data will be kept in the database. The default duration is 2592000 (30 days). 0 represents infinite retention. + + :return: The every_seconds of this BucketRetentionRules. + :rtype: int + """ # noqa: E501 + return self._every_seconds + + @every_seconds.setter + def every_seconds(self, every_seconds): + """Set the every_seconds of this BucketRetentionRules. + + The duration in seconds for how long data will be kept in the database. The default duration is 2592000 (30 days). 0 represents infinite retention. + + :param every_seconds: The every_seconds of this BucketRetentionRules. + :type: int + """ # noqa: E501 + if every_seconds is None: + raise ValueError("Invalid value for `every_seconds`, must not be `None`") # noqa: E501 + if every_seconds < 0: + raise ValueError("Invalid value for `every_seconds`, must be a value greater than or equal to `0`") # noqa: E501 + self._every_seconds = every_seconds + + @property + def shard_group_duration_seconds(self): + """Get the shard_group_duration_seconds of this BucketRetentionRules. + + The shard group duration. The duration or interval (in seconds) that each shard group covers. #### InfluxDB Cloud - Does not use `shardGroupDurationSeconds`. #### InfluxDB OSS - Default value depends on the [bucket retention period](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/#shard-group-duration). + + :return: The shard_group_duration_seconds of this BucketRetentionRules. + :rtype: int + """ # noqa: E501 + return self._shard_group_duration_seconds + + @shard_group_duration_seconds.setter + def shard_group_duration_seconds(self, shard_group_duration_seconds): + """Set the shard_group_duration_seconds of this BucketRetentionRules. + + The shard group duration. The duration or interval (in seconds) that each shard group covers. #### InfluxDB Cloud - Does not use `shardGroupDurationSeconds`. #### InfluxDB OSS - Default value depends on the [bucket retention period](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/#shard-group-duration). + + :param shard_group_duration_seconds: The shard_group_duration_seconds of this BucketRetentionRules.
+ :type: int + """ # noqa: E501 + self._shard_group_duration_seconds = shard_group_duration_seconds + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BucketRetentionRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_shard_mapping.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_shard_mapping.py new file mode 100644 index 0000000..1bf7c27 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/bucket_shard_mapping.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BucketShardMapping(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'old_id': 'int', + 'new_id': 'int' + } + + attribute_map = { + 'old_id': 'oldId', + 'new_id': 'newId' + } + + def __init__(self, old_id=None, new_id=None): # noqa: E501,D401,D403 + """BucketShardMapping - a model defined in OpenAPI.""" # noqa: E501 + self._old_id = None + self._new_id = None + self.discriminator = None + + self.old_id = old_id + self.new_id = new_id + + @property + def old_id(self): + """Get the old_id of this BucketShardMapping. + + :return: The old_id of this BucketShardMapping. + :rtype: int + """ # noqa: E501 + return self._old_id + + @old_id.setter + def old_id(self, old_id): + """Set the old_id of this BucketShardMapping. + + :param old_id: The old_id of this BucketShardMapping. + :type: int + """ # noqa: E501 + if old_id is None: + raise ValueError("Invalid value for `old_id`, must not be `None`") # noqa: E501 + self._old_id = old_id + + @property + def new_id(self): + """Get the new_id of this BucketShardMapping. + + :return: The new_id of this BucketShardMapping. + :rtype: int + """ # noqa: E501 + return self._new_id + + @new_id.setter + def new_id(self, new_id): + """Set the new_id of this BucketShardMapping. + + :param new_id: The new_id of this BucketShardMapping. 
+ :type: int + """ # noqa: E501 + if new_id is None: + raise ValueError("Invalid value for `new_id`, must not be `None`") # noqa: E501 + self._new_id = new_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BucketShardMapping): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/buckets.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/buckets.py new file mode 100644 index 0000000..de3277f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/buckets.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Buckets(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'Links', + 'buckets': 'list[Bucket]' + } + + attribute_map = { + 'links': 'links', + 'buckets': 'buckets' + } + + def __init__(self, links=None, buckets=None): # noqa: E501,D401,D403 + """Buckets - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._buckets = None + self.discriminator = None + + if links is not None: + self.links = links + if buckets is not None: + self.buckets = buckets + + @property + def links(self): + """Get the links of this Buckets. + + :return: The links of this Buckets. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Buckets. + + :param links: The links of this Buckets. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def buckets(self): + """Get the buckets of this Buckets. + + :return: The buckets of this Buckets. + :rtype: list[Bucket] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this Buckets. + + :param buckets: The buckets of this Buckets. 
+ :type: list[Bucket] + """ # noqa: E501 + self._buckets = buckets + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Buckets): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_aggregate_function_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_aggregate_function_type.py new file mode 100644 index 0000000..0cbf6b6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_aggregate_function_type.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BuilderAggregateFunctionType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + FILTER = "filter" + GROUP = "group" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """BuilderAggregateFunctionType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuilderAggregateFunctionType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config.py new file mode 100644 index 0000000..e8a2d02 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BuilderConfig(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'buckets': 'list[str]', + 'tags': 'list[BuilderTagsType]', + 'functions': 'list[BuilderFunctionsType]', + 'aggregate_window': 'BuilderConfigAggregateWindow' + } + + attribute_map = { + 'buckets': 'buckets', + 'tags': 'tags', + 'functions': 'functions', + 'aggregate_window': 'aggregateWindow' + } + + def __init__(self, buckets=None, tags=None, functions=None, aggregate_window=None): # noqa: E501,D401,D403 + """BuilderConfig - a model defined in OpenAPI.""" # noqa: E501 + self._buckets = None + self._tags = None + self._functions = None + self._aggregate_window = None + self.discriminator = None + + if buckets is not None: + self.buckets = buckets + if tags is not None: + self.tags = tags + if functions is not None: + self.functions = functions + if aggregate_window is not None: + self.aggregate_window = aggregate_window + + @property + def buckets(self): + """Get the buckets of this BuilderConfig. + + :return: The buckets of this BuilderConfig. + :rtype: list[str] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this BuilderConfig. + + :param buckets: The buckets of this BuilderConfig. 
+ :type: list[str] + """ # noqa: E501 + self._buckets = buckets + + @property + def tags(self): + """Get the tags of this BuilderConfig. + + :return: The tags of this BuilderConfig. + :rtype: list[BuilderTagsType] + """ # noqa: E501 + return self._tags + + @tags.setter + def tags(self, tags): + """Set the tags of this BuilderConfig. + + :param tags: The tags of this BuilderConfig. + :type: list[BuilderTagsType] + """ # noqa: E501 + self._tags = tags + + @property + def functions(self): + """Get the functions of this BuilderConfig. + + :return: The functions of this BuilderConfig. + :rtype: list[BuilderFunctionsType] + """ # noqa: E501 + return self._functions + + @functions.setter + def functions(self, functions): + """Set the functions of this BuilderConfig. + + :param functions: The functions of this BuilderConfig. + :type: list[BuilderFunctionsType] + """ # noqa: E501 + self._functions = functions + + @property + def aggregate_window(self): + """Get the aggregate_window of this BuilderConfig. + + :return: The aggregate_window of this BuilderConfig. + :rtype: BuilderConfigAggregateWindow + """ # noqa: E501 + return self._aggregate_window + + @aggregate_window.setter + def aggregate_window(self, aggregate_window): + """Set the aggregate_window of this BuilderConfig. + + :param aggregate_window: The aggregate_window of this BuilderConfig. + :type: BuilderConfigAggregateWindow + """ # noqa: E501 + self._aggregate_window = aggregate_window + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuilderConfig): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config_aggregate_window.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config_aggregate_window.py new file mode 100644 index 0000000..bcdde38 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_config_aggregate_window.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BuilderConfigAggregateWindow(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'period': 'str', + 'fill_values': 'bool' + } + + attribute_map = { + 'period': 'period', + 'fill_values': 'fillValues' + } + + def __init__(self, period=None, fill_values=None): # noqa: E501,D401,D403 + """BuilderConfigAggregateWindow - a model defined in OpenAPI.""" # noqa: E501 + self._period = None + self._fill_values = None + self.discriminator = None + + if period is not None: + self.period = period + if fill_values is not None: + self.fill_values = fill_values + + @property + def period(self): + """Get the period of this BuilderConfigAggregateWindow. + + :return: The period of this BuilderConfigAggregateWindow. + :rtype: str + """ # noqa: E501 + return self._period + + @period.setter + def period(self, period): + """Set the period of this BuilderConfigAggregateWindow. + + :param period: The period of this BuilderConfigAggregateWindow. + :type: str + """ # noqa: E501 + self._period = period + + @property + def fill_values(self): + """Get the fill_values of this BuilderConfigAggregateWindow. + + :return: The fill_values of this BuilderConfigAggregateWindow. + :rtype: bool + """ # noqa: E501 + return self._fill_values + + @fill_values.setter + def fill_values(self, fill_values): + """Set the fill_values of this BuilderConfigAggregateWindow. + + :param fill_values: The fill_values of this BuilderConfigAggregateWindow. + :type: bool + """ # noqa: E501 + self._fill_values = fill_values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuilderConfigAggregateWindow): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_functions_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_functions_type.py new file mode 100644 index 0000000..4ebf44e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_functions_type.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BuilderFunctionsType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
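+
+ Illustrative example (not generator output); `name` is the model's only
+ field and is assumed here to hold a Flux function name such as "mean":
+
+     from influxdb_client.domain.builder_functions_type import BuilderFunctionsType
+
+     fn = BuilderFunctionsType(name="mean")
+     assert fn.to_dict() == {"name": "mean"}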
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str' + } + + attribute_map = { + 'name': 'name' + } + + def __init__(self, name=None): # noqa: E501,D401,D403 + """BuilderFunctionsType - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self.discriminator = None + + if name is not None: + self.name = name + + @property + def name(self): + """Get the name of this BuilderFunctionsType. + + :return: The name of this BuilderFunctionsType. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this BuilderFunctionsType. + + :param name: The name of this BuilderFunctionsType. + :type: str + """ # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuilderFunctionsType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_tags_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_tags_type.py new file mode 100644 index 0000000..fcf7e05 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builder_tags_type.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class BuilderTagsType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'key': 'str', + 'values': 'list[str]', + 'aggregate_function_type': 'BuilderAggregateFunctionType' + } + + attribute_map = { + 'key': 'key', + 'values': 'values', + 'aggregate_function_type': 'aggregateFunctionType' + } + + def __init__(self, key=None, values=None, aggregate_function_type=None): # noqa: E501,D401,D403 + """BuilderTagsType - a model defined in OpenAPI.""" # noqa: E501 + self._key = None + self._values = None + self._aggregate_function_type = None + self.discriminator = None + + if key is not None: + self.key = key + if values is not None: + self.values = values + if aggregate_function_type is not None: + self.aggregate_function_type = aggregate_function_type + + @property + def key(self): + """Get the key of this BuilderTagsType. + + :return: The key of this BuilderTagsType. + :rtype: str + """ # noqa: E501 + return self._key + + @key.setter + def key(self, key): + """Set the key of this BuilderTagsType. + + :param key: The key of this BuilderTagsType. + :type: str + """ # noqa: E501 + self._key = key + + @property + def values(self): + """Get the values of this BuilderTagsType. + + :return: The values of this BuilderTagsType. + :rtype: list[str] + """ # noqa: E501 + return self._values + + @values.setter + def values(self, values): + """Set the values of this BuilderTagsType. + + :param values: The values of this BuilderTagsType. + :type: list[str] + """ # noqa: E501 + self._values = values + + @property + def aggregate_function_type(self): + """Get the aggregate_function_type of this BuilderTagsType. + + :return: The aggregate_function_type of this BuilderTagsType. + :rtype: BuilderAggregateFunctionType + """ # noqa: E501 + return self._aggregate_function_type + + @aggregate_function_type.setter + def aggregate_function_type(self, aggregate_function_type): + """Set the aggregate_function_type of this BuilderTagsType. + + :param aggregate_function_type: The aggregate_function_type of this BuilderTagsType. + :type: BuilderAggregateFunctionType + """ # noqa: E501 + self._aggregate_function_type = aggregate_function_type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuilderTagsType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/builtin_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builtin_statement.py new file mode 100644 index 0000000..51f24a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/builtin_statement.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class BuiltinStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'id': 'Identifier' + } + + attribute_map = { + 'type': 'type', + 'id': 'id' + } + + def __init__(self, type=None, id=None): # noqa: E501,D401,D403 + """BuiltinStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._id = None + self.discriminator = None + + if type is not None: + self.type = type + if id is not None: + self.id = id + + @property + def type(self): + """Get the type of this BuiltinStatement. + + Type of AST node + + :return: The type of this BuiltinStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this BuiltinStatement. + + Type of AST node + + :param type: The type of this BuiltinStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def id(self): + """Get the id of this BuiltinStatement. + + :return: The id of this BuiltinStatement. + :rtype: Identifier + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this BuiltinStatement. + + :param id: The id of this BuiltinStatement. + :type: Identifier + """ # noqa: E501 + self._id = id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, BuiltinStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/call_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/call_expression.py new file mode 100644 index 0000000..922b705 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/call_expression.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class CallExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'callee': 'Expression', + 'arguments': 'list[Expression]' + } + + attribute_map = { + 'type': 'type', + 'callee': 'callee', + 'arguments': 'arguments' + } + + def __init__(self, type=None, callee=None, arguments=None): # noqa: E501,D401,D403 + """CallExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._callee = None + self._arguments = None + self.discriminator = None + + if type is not None: + self.type = type + if callee is not None: + self.callee = callee + if arguments is not None: + self.arguments = arguments + + @property + def type(self): + """Get the type of this CallExpression. + + Type of AST node + + :return: The type of this CallExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this CallExpression. + + Type of AST node + + :param type: The type of this CallExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def callee(self): + """Get the callee of this CallExpression. + + :return: The callee of this CallExpression. + :rtype: Expression + """ # noqa: E501 + return self._callee + + @callee.setter + def callee(self, callee): + """Set the callee of this CallExpression. + + :param callee: The callee of this CallExpression. + :type: Expression + """ # noqa: E501 + self._callee = callee + + @property + def arguments(self): + """Get the arguments of this CallExpression. + + Function arguments + + :return: The arguments of this CallExpression. + :rtype: list[Expression] + """ # noqa: E501 + return self._arguments + + @arguments.setter + def arguments(self, arguments): + """Set the arguments of this CallExpression. + + Function arguments + + :param arguments: The arguments of this CallExpression. 
+ :type: list[Expression] + """ # noqa: E501 + self._arguments = arguments + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CallExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell.py new file mode 100644 index 0000000..d52d947 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell.py @@ -0,0 +1,249 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Cell(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'links': 'CellLinks', + 'x': 'int', + 'y': 'int', + 'w': 'int', + 'h': 'int', + 'view_id': 'str' + } + + attribute_map = { + 'id': 'id', + 'links': 'links', + 'x': 'x', + 'y': 'y', + 'w': 'w', + 'h': 'h', + 'view_id': 'viewID' + } + + def __init__(self, id=None, links=None, x=None, y=None, w=None, h=None, view_id=None): # noqa: E501,D401,D403 + """Cell - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._links = None + self._x = None + self._y = None + self._w = None + self._h = None + self._view_id = None + self.discriminator = None + + if id is not None: + self.id = id + if links is not None: + self.links = links + if x is not None: + self.x = x + if y is not None: + self.y = y + if w is not None: + self.w = w + if h is not None: + self.h = h + if view_id is not None: + self.view_id = view_id + + @property + def id(self): + """Get the id of this Cell. + + :return: The id of this Cell. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Cell. + + :param id: The id of this Cell. + :type: str + """ # noqa: E501 + self._id = id + + @property + def links(self): + """Get the links of this Cell. + + :return: The links of this Cell. + :rtype: CellLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Cell. 
+ + :param links: The links of this Cell. + :type: CellLinks + """ # noqa: E501 + self._links = links + + @property + def x(self): + """Get the x of this Cell. + + :return: The x of this Cell. + :rtype: int + """ # noqa: E501 + return self._x + + @x.setter + def x(self, x): + """Set the x of this Cell. + + :param x: The x of this Cell. + :type: int + """ # noqa: E501 + self._x = x + + @property + def y(self): + """Get the y of this Cell. + + :return: The y of this Cell. + :rtype: int + """ # noqa: E501 + return self._y + + @y.setter + def y(self, y): + """Set the y of this Cell. + + :param y: The y of this Cell. + :type: int + """ # noqa: E501 + self._y = y + + @property + def w(self): + """Get the w of this Cell. + + :return: The w of this Cell. + :rtype: int + """ # noqa: E501 + return self._w + + @w.setter + def w(self, w): + """Set the w of this Cell. + + :param w: The w of this Cell. + :type: int + """ # noqa: E501 + self._w = w + + @property + def h(self): + """Get the h of this Cell. + + :return: The h of this Cell. + :rtype: int + """ # noqa: E501 + return self._h + + @h.setter + def h(self, h): + """Set the h of this Cell. + + :param h: The h of this Cell. + :type: int + """ # noqa: E501 + self._h = h + + @property + def view_id(self): + """Get the view_id of this Cell. + + The reference to a view from the views API. + + :return: The view_id of this Cell. + :rtype: str + """ # noqa: E501 + return self._view_id + + @view_id.setter + def view_id(self, view_id): + """Set the view_id of this Cell. + + The reference to a view from the views API. + + :param view_id: The view_id of this Cell. + :type: str + """ # noqa: E501 + self._view_id = view_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Cell): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_links.py new file mode 100644 index 0000000..fc556c7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_links.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CellLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
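A quick aside on the Cell model above: a minimal usage sketch, not part of the generated diff, with all IDs hypothetical. `to_dict()` iterates `openapi_types`, so keys come out as Python attribute names (`view_id`), not the JSON keys in `attribute_map` (`viewID`), and unset fields surface as `None`.

from influxdb_client.domain.cell import Cell

# Hypothetical IDs; Cell stores dashboard-grid placement plus a view reference.
cell = Cell(id="0123456789abcdef", x=0, y=0, w=4, h=4, view_id="fedcba9876543210")
print(cell.to_dict())
# {'id': '0123456789abcdef', 'links': None, 'x': 0, 'y': 0,
#  'w': 4, 'h': 4, 'view_id': 'fedcba9876543210'}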
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'view': 'str' + } + + attribute_map = { + '_self': 'self', + 'view': 'view' + } + + def __init__(self, _self=None, view=None): # noqa: E501,D401,D403 + """CellLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._view = None + self.discriminator = None + + if _self is not None: + self._self = _self + if view is not None: + self.view = view + + @property + def _self(self): + """Get the _self of this CellLinks. + + :return: The _self of this CellLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this CellLinks. + + :param _self: The _self of this CellLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def view(self): + """Get the view of this CellLinks. + + :return: The view of this CellLinks. + :rtype: str + """ # noqa: E501 + return self._view + + @view.setter + def view(self, view): + """Set the view of this CellLinks. + + :param view: The view of this CellLinks. + :type: str + """ # noqa: E501 + self._view = view + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CellLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_update.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_update.py new file mode 100644 index 0000000..d58c625 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_update.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CellUpdate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'x': 'int', + 'y': 'int', + 'w': 'int', + 'h': 'int' + } + + attribute_map = { + 'x': 'x', + 'y': 'y', + 'w': 'w', + 'h': 'h' + } + + def __init__(self, x=None, y=None, w=None, h=None): # noqa: E501,D401,D403 + """CellUpdate - a model defined in OpenAPI.""" # noqa: E501 + self._x = None + self._y = None + self._w = None + self._h = None + self.discriminator = None + + if x is not None: + self.x = x + if y is not None: + self.y = y + if w is not None: + self.w = w + if h is not None: + self.h = h + + @property + def x(self): + """Get the x of this CellUpdate. + + :return: The x of this CellUpdate. + :rtype: int + """ # noqa: E501 + return self._x + + @x.setter + def x(self, x): + """Set the x of this CellUpdate. + + :param x: The x of this CellUpdate. + :type: int + """ # noqa: E501 + self._x = x + + @property + def y(self): + """Get the y of this CellUpdate. + + :return: The y of this CellUpdate. + :rtype: int + """ # noqa: E501 + return self._y + + @y.setter + def y(self, y): + """Set the y of this CellUpdate. + + :param y: The y of this CellUpdate. + :type: int + """ # noqa: E501 + self._y = y + + @property + def w(self): + """Get the w of this CellUpdate. + + :return: The w of this CellUpdate. + :rtype: int + """ # noqa: E501 + return self._w + + @w.setter + def w(self, w): + """Set the w of this CellUpdate. + + :param w: The w of this CellUpdate. + :type: int + """ # noqa: E501 + self._w = w + + @property + def h(self): + """Get the h of this CellUpdate. + + :return: The h of this CellUpdate. + :rtype: int + """ # noqa: E501 + return self._h + + @h.setter + def h(self, h): + """Set the h of this CellUpdate. + + :param h: The h of this CellUpdate. + :type: int + """ # noqa: E501 + self._h = h + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CellUpdate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_with_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_with_view_properties.py new file mode 100644 index 0000000..4379cd1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/cell_with_view_properties.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.cell import Cell + + +class CellWithViewProperties(Cell): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'properties': 'ViewProperties', + 'id': 'str', + 'links': 'CellLinks', + 'x': 'int', + 'y': 'int', + 'w': 'int', + 'h': 'int', + 'view_id': 'str' + } + + attribute_map = { + 'name': 'name', + 'properties': 'properties', + 'id': 'id', + 'links': 'links', + 'x': 'x', + 'y': 'y', + 'w': 'w', + 'h': 'h', + 'view_id': 'viewID' + } + + def __init__(self, name=None, properties=None, id=None, links=None, x=None, y=None, w=None, h=None, view_id=None): # noqa: E501,D401,D403 + """CellWithViewProperties - a model defined in OpenAPI.""" # noqa: E501 + Cell.__init__(self, id=id, links=links, x=x, y=y, w=w, h=h, view_id=view_id) # noqa: E501 + + self._name = None + self._properties = None + self.discriminator = None + + if name is not None: + self.name = name + if properties is not None: + self.properties = properties + + @property + def name(self): + """Get the name of this CellWithViewProperties. + + :return: The name of this CellWithViewProperties. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this CellWithViewProperties. + + :param name: The name of this CellWithViewProperties. + :type: str + """ # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this CellWithViewProperties. + + :return: The properties of this CellWithViewProperties. + :rtype: ViewProperties + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this CellWithViewProperties. + + :param properties: The properties of this CellWithViewProperties. 
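CellWithViewProperties simply layers `name` and `properties` on top of Cell's constructor, so the inherited grid fields and the new ones coexist on one object. An illustrative sketch (values hypothetical):

from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties

# Cell.__init__ receives the inherited fields; name is handled locally.
cell = CellWithViewProperties(name="CPU usage", x=0, y=0, w=6, h=3)
print(cell.name, cell.w)  # CPU usage 6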
+ :type: ViewProperties + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CellWithViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check.py new file mode 100644 index 0000000..c85e4a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Check(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'deadman': 'DeadmanCheck', + 'custom': 'CustomCheck', + 'threshold': 'ThresholdCheck' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """Check - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this Check. + + :return: The type of this Check. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Check. + + :param type: The type of this Check. 
+ :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Check): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base.py new file mode 100644 index 0000000..b54921a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base.py @@ -0,0 +1,452 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CheckBase(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
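The `Check` model above is the interesting one: it declares `type` as a discriminator, and `get_real_child_model` maps a payload's `type` value to the concrete subclass name. A sketch of how a deserializer would use it (the payload is hypothetical):

from influxdb_client.domain.check import Check

payload = {"type": "threshold", "name": "cpu-check"}
base = Check(type=payload["type"])
# attribute_map['type'] -> 'type'; the payload's value selects the subclass:
print(base.get_real_child_model(payload))  # 'ThresholdCheck'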
+ """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'query': 'DashboardQuery', + 'status': 'TaskStatusType', + 'description': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'labels': 'list[Label]', + 'links': 'CheckBaseLinks' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'query': 'query', + 'status': 'status', + 'description': 'description', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, id=None, name=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, query=None, status=None, description=None, latest_completed=None, last_run_status=None, last_run_error=None, labels=None, links=None): # noqa: E501,D401,D403 + """CheckBase - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._org_id = None + self._task_id = None + self._owner_id = None + self._created_at = None + self._updated_at = None + self._query = None + self._status = None + self._description = None + self._latest_completed = None + self._last_run_status = None + self._last_run_error = None + self._labels = None + self._links = None + self.discriminator = None + + if id is not None: + self.id = id + self.name = name + self.org_id = org_id + if task_id is not None: + self.task_id = task_id + if owner_id is not None: + self.owner_id = owner_id + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + self.query = query + if status is not None: + self.status = status + if description is not None: + self.description = description + if latest_completed is not None: + self.latest_completed = latest_completed + if last_run_status is not None: + self.last_run_status = last_run_status + if last_run_error is not None: + self.last_run_error = last_run_error + if labels is not None: + self.labels = labels + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this CheckBase. + + :return: The id of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this CheckBase. + + :param id: The id of this CheckBase. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this CheckBase. + + :return: The name of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this CheckBase. + + :param name: The name of this CheckBase. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def org_id(self): + """Get the org_id of this CheckBase. + + The ID of the organization that owns this check. + + :return: The org_id of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this CheckBase. + + The ID of the organization that owns this check. + + :param org_id: The org_id of this CheckBase. 
+ :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def task_id(self): + """Get the task_id of this CheckBase. + + The ID of the task associated with this check. + + :return: The task_id of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._task_id + + @task_id.setter + def task_id(self, task_id): + """Set the task_id of this CheckBase. + + The ID of the task associated with this check. + + :param task_id: The task_id of this CheckBase. + :type: str + """ # noqa: E501 + self._task_id = task_id + + @property + def owner_id(self): + """Get the owner_id of this CheckBase. + + The ID of creator used to create this check. + + :return: The owner_id of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._owner_id + + @owner_id.setter + def owner_id(self, owner_id): + """Set the owner_id of this CheckBase. + + The ID of creator used to create this check. + + :param owner_id: The owner_id of this CheckBase. + :type: str + """ # noqa: E501 + self._owner_id = owner_id + + @property + def created_at(self): + """Get the created_at of this CheckBase. + + :return: The created_at of this CheckBase. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this CheckBase. + + :param created_at: The created_at of this CheckBase. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this CheckBase. + + :return: The updated_at of this CheckBase. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this CheckBase. + + :param updated_at: The updated_at of this CheckBase. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def query(self): + """Get the query of this CheckBase. + + :return: The query of this CheckBase. + :rtype: DashboardQuery + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this CheckBase. + + :param query: The query of this CheckBase. + :type: DashboardQuery + """ # noqa: E501 + if query is None: + raise ValueError("Invalid value for `query`, must not be `None`") # noqa: E501 + self._query = query + + @property + def status(self): + """Get the status of this CheckBase. + + :return: The status of this CheckBase. + :rtype: TaskStatusType + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this CheckBase. + + :param status: The status of this CheckBase. + :type: TaskStatusType + """ # noqa: E501 + self._status = status + + @property + def description(self): + """Get the description of this CheckBase. + + An optional description of the check. + + :return: The description of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this CheckBase. + + An optional description of the check. + + :param description: The description of this CheckBase. + :type: str + """ # noqa: E501 + self._description = description + + @property + def latest_completed(self): + """Get the latest_completed of this CheckBase. 
+ + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :return: The latest_completed of this CheckBase. + :rtype: datetime + """ # noqa: E501 + return self._latest_completed + + @latest_completed.setter + def latest_completed(self, latest_completed): + """Set the latest_completed of this CheckBase. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :param latest_completed: The latest_completed of this CheckBase. + :type: datetime + """ # noqa: E501 + self._latest_completed = latest_completed + + @property + def last_run_status(self): + """Get the last_run_status of this CheckBase. + + :return: The last_run_status of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._last_run_status + + @last_run_status.setter + def last_run_status(self, last_run_status): + """Set the last_run_status of this CheckBase. + + :param last_run_status: The last_run_status of this CheckBase. + :type: str + """ # noqa: E501 + self._last_run_status = last_run_status + + @property + def last_run_error(self): + """Get the last_run_error of this CheckBase. + + :return: The last_run_error of this CheckBase. + :rtype: str + """ # noqa: E501 + return self._last_run_error + + @last_run_error.setter + def last_run_error(self, last_run_error): + """Set the last_run_error of this CheckBase. + + :param last_run_error: The last_run_error of this CheckBase. + :type: str + """ # noqa: E501 + self._last_run_error = last_run_error + + @property + def labels(self): + """Get the labels of this CheckBase. + + :return: The labels of this CheckBase. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this CheckBase. + + :param labels: The labels of this CheckBase. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def links(self): + """Get the links of this CheckBase. + + :return: The links of this CheckBase. + :rtype: CheckBaseLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this CheckBase. + + :param links: The links of this CheckBase. 
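Note that unlike the optional fields, `name`, `org_id`, and `query` are assigned unconditionally in `CheckBase.__init__`, so their setters enforce presence at construction time. A sketch (not part of the diff):

from influxdb_client.domain.check_base import CheckBase

try:
    CheckBase(name=None, org_id="abc123", query=None)
except ValueError as err:
    print(err)  # Invalid value for `name`, must not be `None`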
+ :type: CheckBaseLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base_links.py new file mode 100644 index 0000000..19c28aa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_base_links.py @@ -0,0 +1,219 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CheckBaseLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'labels': 'str', + 'members': 'str', + 'owners': 'str', + 'query': 'str' + } + + attribute_map = { + '_self': 'self', + 'labels': 'labels', + 'members': 'members', + 'owners': 'owners', + 'query': 'query' + } + + def __init__(self, _self=None, labels=None, members=None, owners=None, query=None): # noqa: E501,D401,D403 + """CheckBaseLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._labels = None + self._members = None + self._owners = None + self._query = None + self.discriminator = None + + if _self is not None: + self._self = _self + if labels is not None: + self.labels = labels + if members is not None: + self.members = members + if owners is not None: + self.owners = owners + if query is not None: + self.query = query + + @property + def _self(self): + """Get the _self of this CheckBaseLinks. + + URI of resource. + + :return: The _self of this CheckBaseLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this CheckBaseLinks. + + URI of resource. + + :param _self: The _self of this CheckBaseLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def labels(self): + """Get the labels of this CheckBaseLinks. + + URI of resource. + + :return: The labels of this CheckBaseLinks. 
+ :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this CheckBaseLinks. + + URI of resource. + + :param labels: The labels of this CheckBaseLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + @property + def members(self): + """Get the members of this CheckBaseLinks. + + URI of resource. + + :return: The members of this CheckBaseLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this CheckBaseLinks. + + URI of resource. + + :param members: The members of this CheckBaseLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def owners(self): + """Get the owners of this CheckBaseLinks. + + URI of resource. + + :return: The owners of this CheckBaseLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this CheckBaseLinks. + + URI of resource. + + :param owners: The owners of this CheckBaseLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + @property + def query(self): + """Get the query of this CheckBaseLinks. + + URI of resource. + + :return: The query of this CheckBaseLinks. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this CheckBaseLinks. + + URI of resource. + + :param query: The query of this CheckBaseLinks. + :type: str + """ # noqa: E501 + self._query = query + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckBaseLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_discriminator.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_discriminator.py new file mode 100644 index 0000000..c2ebfea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_discriminator.py @@ -0,0 +1,116 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.check_base import CheckBase + + +class CheckDiscriminator(CheckBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'query': 'DashboardQuery', + 'status': 'TaskStatusType', + 'description': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'labels': 'list[Label]', + 'links': 'CheckBaseLinks' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'query': 'query', + 'status': 'status', + 'description': 'description', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, id=None, name=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, query=None, status=None, description=None, latest_completed=None, last_run_status=None, last_run_error=None, labels=None, links=None): # noqa: E501,D401,D403 + """CheckDiscriminator - a model defined in OpenAPI.""" # noqa: E501 + CheckBase.__init__(self, id=id, name=name, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, query=query, status=status, description=description, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckDiscriminator): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_patch.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_patch.py new file mode 100644 index 0000000..014e910 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_patch.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CheckPatch(object): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'status': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'status': 'status' + } + + def __init__(self, name=None, description=None, status=None): # noqa: E501,D401,D403 + """CheckPatch - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._status = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if status is not None: + self.status = status + + @property + def name(self): + """Get the name of this CheckPatch. + + :return: The name of this CheckPatch. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this CheckPatch. + + :param name: The name of this CheckPatch. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this CheckPatch. + + :return: The description of this CheckPatch. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this CheckPatch. + + :param description: The description of this CheckPatch. + :type: str + """ # noqa: E501 + self._description = description + + @property + def status(self): + """Get the status of this CheckPatch. + + :return: The status of this CheckPatch. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this CheckPatch. + + :param status: The status of this CheckPatch. + :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckPatch): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_status_level.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_status_level.py new file mode 100644 index 0000000..476f2b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_status_level.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. 
Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CheckStatusLevel(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNKNOWN = "UNKNOWN" + OK = "OK" + INFO = "INFO" + CRIT = "CRIT" + WARN = "WARN" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """CheckStatusLevel - a model defined in OpenAPI.""" # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckStatusLevel): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_view_properties.py new file mode 100644 index 0000000..5226c49 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/check_view_properties.py @@ -0,0 +1,350 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class CheckViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'type': 'str', + 'shape': 'str', + 'check_id': 'str', + 'check': 'Check', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'type': 'type', + 'shape': 'shape', + 'check_id': 'checkID', + 'check': 'check', + 'queries': 'queries', + 'colors': 'colors', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, type=None, shape=None, check_id=None, check=None, queries=None, colors=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """CheckViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._type = None + self._shape = None + self._check_id = None + self._check = None + self._queries = None + self._colors = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide = adaptive_zoom_hide + self.type = type + self.shape = shape + self.check_id = check_id + if check is not None: + self.check = check + self.queries = queries + self.colors = colors + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this CheckViewProperties. + + :return: The adaptive_zoom_hide of this CheckViewProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this CheckViewProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this CheckViewProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def type(self): + """Get the type of this CheckViewProperties. + + :return: The type of this CheckViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this CheckViewProperties. + + :param type: The type of this CheckViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def shape(self): + """Get the shape of this CheckViewProperties. + + :return: The shape of this CheckViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this CheckViewProperties. + + :param shape: The shape of this CheckViewProperties. 
+ :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def check_id(self): + """Get the check_id of this CheckViewProperties. + + :return: The check_id of this CheckViewProperties. + :rtype: str + """ # noqa: E501 + return self._check_id + + @check_id.setter + def check_id(self, check_id): + """Set the check_id of this CheckViewProperties. + + :param check_id: The check_id of this CheckViewProperties. + :type: str + """ # noqa: E501 + if check_id is None: + raise ValueError("Invalid value for `check_id`, must not be `None`") # noqa: E501 + self._check_id = check_id + + @property + def check(self): + """Get the check of this CheckViewProperties. + + :return: The check of this CheckViewProperties. + :rtype: Check + """ # noqa: E501 + return self._check + + @check.setter + def check(self, check): + """Set the check of this CheckViewProperties. + + :param check: The check of this CheckViewProperties. + :type: Check + """ # noqa: E501 + self._check = check + + @property + def queries(self): + """Get the queries of this CheckViewProperties. + + :return: The queries of this CheckViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this CheckViewProperties. + + :param queries: The queries of this CheckViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this CheckViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this CheckViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this CheckViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this CheckViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this CheckViewProperties. + + :return: The legend_colorize_rows of this CheckViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this CheckViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this CheckViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this CheckViewProperties. + + :return: The legend_hide of this CheckViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this CheckViewProperties. + + :param legend_hide: The legend_hide of this CheckViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this CheckViewProperties. + + :return: The legend_opacity of this CheckViewProperties. 
+ :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this CheckViewProperties. + + :param legend_opacity: The legend_opacity of this CheckViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this CheckViewProperties. + + :return: The legend_orientation_threshold of this CheckViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this CheckViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this CheckViewProperties. + :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CheckViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/checks.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/checks.py new file mode 100644 index 0000000..05083fc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/checks.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Checks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
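CheckViewProperties follows the same pattern with five mandatory fields (`type`, `shape`, `check_id`, `queries`, `colors`): passing `None` for any of them raises in the setter. A constructing sketch, where the field values are illustrative rather than prescribed by this diff:

from influxdb_client.domain.check_view_properties import CheckViewProperties

props = CheckViewProperties(
    type="check", shape="chronograf-v2", check_id="000000000000000a",
    queries=[], colors=[],  # empty lists satisfy the not-None checks
)
print(props.shape)  # chronograf-v2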
+ """ + openapi_types = { + 'checks': 'list[Check]', + 'links': 'Links' + } + + attribute_map = { + 'checks': 'checks', + 'links': 'links' + } + + def __init__(self, checks=None, links=None): # noqa: E501,D401,D403 + """Checks - a model defined in OpenAPI.""" # noqa: E501 + self._checks = None + self._links = None + self.discriminator = None + + if checks is not None: + self.checks = checks + if links is not None: + self.links = links + + @property + def checks(self): + """Get the checks of this Checks. + + :return: The checks of this Checks. + :rtype: list[Check] + """ # noqa: E501 + return self._checks + + @checks.setter + def checks(self, checks): + """Set the checks of this Checks. + + :param checks: The checks of this Checks. + :type: list[Check] + """ # noqa: E501 + self._checks = checks + + @property + def links(self): + """Get the links of this Checks. + + :return: The links of this Checks. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Checks. + + :param links: The links of this Checks. + :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Checks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_data_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_data_type.py new file mode 100644 index 0000000..e2a2f2c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_data_type.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ColumnDataType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + INTEGER = "integer" + FLOAT = "float" + BOOLEAN = "boolean" + STRING = "string" + UNSIGNED = "unsigned" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """ColumnDataType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ColumnDataType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_semantic_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_semantic_type.py new file mode 100644 index 0000000..3f88a13 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/column_semantic_type.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ColumnSemanticType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + TIMESTAMP = "timestamp" + TAG = "tag" + FIELD = "field" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """ColumnSemanticType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ColumnSemanticType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/conditional_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/conditional_expression.py new file mode 100644 index 0000000..da5af43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/conditional_expression.py @@ -0,0 +1,184 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class ConditionalExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'test': 'Expression', + 'alternate': 'Expression', + 'consequent': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'test': 'test', + 'alternate': 'alternate', + 'consequent': 'consequent' + } + + def __init__(self, type=None, test=None, alternate=None, consequent=None): # noqa: E501,D401,D403 + """ConditionalExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._test = None + self._alternate = None + self._consequent = None + self.discriminator = None + + if type is not None: + self.type = type + if test is not None: + self.test = test + if alternate is not None: + self.alternate = alternate + if consequent is not None: + self.consequent = consequent + + @property + def type(self): + """Get the type of this ConditionalExpression. + + Type of AST node + + :return: The type of this ConditionalExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ConditionalExpression. 
+ + Type of AST node + + :param type: The type of this ConditionalExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def test(self): + """Get the test of this ConditionalExpression. + + :return: The test of this ConditionalExpression. + :rtype: Expression + """ # noqa: E501 + return self._test + + @test.setter + def test(self, test): + """Set the test of this ConditionalExpression. + + :param test: The test of this ConditionalExpression. + :type: Expression + """ # noqa: E501 + self._test = test + + @property + def alternate(self): + """Get the alternate of this ConditionalExpression. + + :return: The alternate of this ConditionalExpression. + :rtype: Expression + """ # noqa: E501 + return self._alternate + + @alternate.setter + def alternate(self, alternate): + """Set the alternate of this ConditionalExpression. + + :param alternate: The alternate of this ConditionalExpression. + :type: Expression + """ # noqa: E501 + self._alternate = alternate + + @property + def consequent(self): + """Get the consequent of this ConditionalExpression. + + :return: The consequent of this ConditionalExpression. + :rtype: Expression + """ # noqa: E501 + return self._consequent + + @consequent.setter + def consequent(self, consequent): + """Set the consequent of this ConditionalExpression. + + :param consequent: The consequent of this ConditionalExpression. + :type: Expression + """ # noqa: E501 + self._consequent = consequent + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ConditionalExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/config.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/config.py new file mode 100644 index 0000000..cc4a033 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/config.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Config(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
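+
+    Illustrative usage (editor's sketch; the key shown is a hypothetical
+    runtime option)::
+
+        cfg = Config(config={"http-bind-address": ":8086"})
+        cfg.to_dict()  # -> {'config': {'http-bind-address': ':8086'}}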
+ """ + openapi_types = { + 'config': 'dict(str, object)' + } + + attribute_map = { + 'config': 'config' + } + + def __init__(self, config=None): # noqa: E501,D401,D403 + """Config - a model defined in OpenAPI.""" # noqa: E501 + self._config = None + self.discriminator = None + + if config is not None: + self.config = config + + @property + def config(self): + """Get the config of this Config. + + :return: The config of this Config. + :rtype: dict(str, object) + """ # noqa: E501 + return self._config + + @config.setter + def config(self, config): + """Set the config of this Config. + + :param config: The config of this Config. + :type: dict(str, object) + """ # noqa: E501 + self._config = config + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Config): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/constant_variable_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/constant_variable_properties.py new file mode 100644 index 0000000..806045e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/constant_variable_properties.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.variable_properties import VariableProperties + + +class ConstantVariableProperties(VariableProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'values': 'list[str]' + } + + attribute_map = { + 'type': 'type', + 'values': 'values' + } + + def __init__(self, type=None, values=None): # noqa: E501,D401,D403 + """ConstantVariableProperties - a model defined in OpenAPI.""" # noqa: E501 + VariableProperties.__init__(self) # noqa: E501 + + self._type = None + self._values = None + self.discriminator = None + + if type is not None: + self.type = type + if values is not None: + self.values = values + + @property + def type(self): + """Get the type of this ConstantVariableProperties. 
+ + :return: The type of this ConstantVariableProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ConstantVariableProperties. + + :param type: The type of this ConstantVariableProperties. + :type: str + """ # noqa: E501 + self._type = type + + @property + def values(self): + """Get the values of this ConstantVariableProperties. + + :return: The values of this ConstantVariableProperties. + :rtype: list[str] + """ # noqa: E501 + return self._values + + @values.setter + def values(self, values): + """Set the values of this ConstantVariableProperties. + + :param values: The values of this ConstantVariableProperties. + :type: list[str] + """ # noqa: E501 + self._values = values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ConstantVariableProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_cell.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_cell.py new file mode 100644 index 0000000..1251c2f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_cell.py @@ -0,0 +1,226 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CreateCell(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
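+
+    Illustrative usage (editor's sketch; the view ID is hypothetical). Note
+    that ``to_dict()`` keys use the Python attribute names, while
+    ``attribute_map`` holds the JSON field names (``using_view`` vs
+    ``usingView``)::
+
+        cell = CreateCell(name="cpu", x=0, y=0, w=4, h=4,
+                          using_view="0000000000000001")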
+ """ + openapi_types = { + 'name': 'str', + 'x': 'int', + 'y': 'int', + 'w': 'int', + 'h': 'int', + 'using_view': 'str' + } + + attribute_map = { + 'name': 'name', + 'x': 'x', + 'y': 'y', + 'w': 'w', + 'h': 'h', + 'using_view': 'usingView' + } + + def __init__(self, name=None, x=None, y=None, w=None, h=None, using_view=None): # noqa: E501,D401,D403 + """CreateCell - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._x = None + self._y = None + self._w = None + self._h = None + self._using_view = None + self.discriminator = None + + if name is not None: + self.name = name + if x is not None: + self.x = x + if y is not None: + self.y = y + if w is not None: + self.w = w + if h is not None: + self.h = h + if using_view is not None: + self.using_view = using_view + + @property + def name(self): + """Get the name of this CreateCell. + + :return: The name of this CreateCell. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this CreateCell. + + :param name: The name of this CreateCell. + :type: str + """ # noqa: E501 + self._name = name + + @property + def x(self): + """Get the x of this CreateCell. + + :return: The x of this CreateCell. + :rtype: int + """ # noqa: E501 + return self._x + + @x.setter + def x(self, x): + """Set the x of this CreateCell. + + :param x: The x of this CreateCell. + :type: int + """ # noqa: E501 + self._x = x + + @property + def y(self): + """Get the y of this CreateCell. + + :return: The y of this CreateCell. + :rtype: int + """ # noqa: E501 + return self._y + + @y.setter + def y(self, y): + """Set the y of this CreateCell. + + :param y: The y of this CreateCell. + :type: int + """ # noqa: E501 + self._y = y + + @property + def w(self): + """Get the w of this CreateCell. + + :return: The w of this CreateCell. + :rtype: int + """ # noqa: E501 + return self._w + + @w.setter + def w(self, w): + """Set the w of this CreateCell. + + :param w: The w of this CreateCell. + :type: int + """ # noqa: E501 + self._w = w + + @property + def h(self): + """Get the h of this CreateCell. + + :return: The h of this CreateCell. + :rtype: int + """ # noqa: E501 + return self._h + + @h.setter + def h(self, h): + """Set the h of this CreateCell. + + :param h: The h of this CreateCell. + :type: int + """ # noqa: E501 + self._h = h + + @property + def using_view(self): + """Get the using_view of this CreateCell. + + Makes a copy of the provided view. + + :return: The using_view of this CreateCell. + :rtype: str + """ # noqa: E501 + return self._using_view + + @using_view.setter + def using_view(self, using_view): + """Set the using_view of this CreateCell. + + Makes a copy of the provided view. + + :param using_view: The using_view of this CreateCell. 
+ :type: str + """ # noqa: E501 + self._using_view = using_view + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CreateCell): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_dashboard_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_dashboard_request.py new file mode 100644 index 0000000..e5de81e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/create_dashboard_request.py @@ -0,0 +1,167 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class CreateDashboardRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'org_id': 'str', + 'name': 'str', + 'description': 'str' + } + + attribute_map = { + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description' + } + + def __init__(self, org_id=None, name=None, description=None): # noqa: E501,D401,D403 + """CreateDashboardRequest - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._name = None + self._description = None + self.discriminator = None + + self.org_id = org_id + self.name = name + if description is not None: + self.description = description + + @property + def org_id(self): + """Get the org_id of this CreateDashboardRequest. + + The ID of the organization that owns the dashboard. + + :return: The org_id of this CreateDashboardRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this CreateDashboardRequest. + + The ID of the organization that owns the dashboard. + + :param org_id: The org_id of this CreateDashboardRequest. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this CreateDashboardRequest. + + The user-facing name of the dashboard. + + :return: The name of this CreateDashboardRequest. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this CreateDashboardRequest. + + The user-facing name of the dashboard. + + :param name: The name of this CreateDashboardRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this CreateDashboardRequest. + + The user-facing description of the dashboard. + + :return: The description of this CreateDashboardRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this CreateDashboardRequest. + + The user-facing description of the dashboard. + + :param description: The description of this CreateDashboardRequest. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CreateDashboardRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/custom_check.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/custom_check.py new file mode 100644 index 0000000..88c0180 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/custom_check.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + + +class CustomCheck(CheckDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
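+
+    Illustrative usage (editor's sketch; ``flux_query`` is a hypothetical
+    ``DashboardQuery``)::
+
+        check = CustomCheck(name="deadman", org_id="0000000000000001",
+                            query=flux_query)
+        check.type  # -> 'custom'; the setter rejects None with a ValueError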
+ """ + openapi_types = { + 'type': 'str', + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'query': 'DashboardQuery', + 'status': 'TaskStatusType', + 'description': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'labels': 'list[Label]', + 'links': 'CheckBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'query': 'query', + 'status': 'status', + 'description': 'description', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="custom", id=None, name=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, query=None, status=None, description=None, latest_completed=None, last_run_status=None, last_run_error=None, labels=None, links=None): # noqa: E501,D401,D403 + """CustomCheck - a model defined in OpenAPI.""" # noqa: E501 + CheckDiscriminator.__init__(self, id=id, name=name, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, query=query, status=status, description=description, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, labels=labels, links=links) # noqa: E501 + + self._type = None + self.discriminator = None + + self.type = type + + @property + def type(self): + """Get the type of this CustomCheck. + + :return: The type of this CustomCheck. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this CustomCheck. + + :param type: The type of this CustomCheck. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, CustomCheck): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard.py new file mode 100644 index 0000000..4b4e47e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard.py @@ -0,0 +1,209 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest + + +class Dashboard(CreateDashboardRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'object', + 'id': 'str', + 'meta': 'object', + 'cells': 'list[Cell]', + 'labels': 'list[Label]', + 'org_id': 'str', + 'name': 'str', + 'description': 'str' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'meta': 'meta', + 'cells': 'cells', + 'labels': 'labels', + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description' + } + + def __init__(self, links=None, id=None, meta=None, cells=None, labels=None, org_id=None, name=None, description=None): # noqa: E501,D401,D403 + """Dashboard - a model defined in OpenAPI.""" # noqa: E501 + CreateDashboardRequest.__init__(self, org_id=org_id, name=name, description=description) # noqa: E501 + + self._links = None + self._id = None + self._meta = None + self._cells = None + self._labels = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + if meta is not None: + self.meta = meta + if cells is not None: + self.cells = cells + if labels is not None: + self.labels = labels + + @property + def links(self): + """Get the links of this Dashboard. + + :return: The links of this Dashboard. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Dashboard. + + :param links: The links of this Dashboard. + :type: object + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this Dashboard. + + :return: The id of this Dashboard. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Dashboard. + + :param id: The id of this Dashboard. + :type: str + """ # noqa: E501 + self._id = id + + @property + def meta(self): + """Get the meta of this Dashboard. + + :return: The meta of this Dashboard. + :rtype: object + """ # noqa: E501 + return self._meta + + @meta.setter + def meta(self, meta): + """Set the meta of this Dashboard. + + :param meta: The meta of this Dashboard. + :type: object + """ # noqa: E501 + self._meta = meta + + @property + def cells(self): + """Get the cells of this Dashboard. + + :return: The cells of this Dashboard. + :rtype: list[Cell] + """ # noqa: E501 + return self._cells + + @cells.setter + def cells(self, cells): + """Set the cells of this Dashboard. + + :param cells: The cells of this Dashboard. + :type: list[Cell] + """ # noqa: E501 + self._cells = cells + + @property + def labels(self): + """Get the labels of this Dashboard. + + :return: The labels of this Dashboard. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this Dashboard. + + :param labels: The labels of this Dashboard. 
+ :type: list[Label] + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Dashboard): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_color.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_color.py new file mode 100644 index 0000000..b03a4e2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_color.py @@ -0,0 +1,228 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DashboardColor(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'type': 'str', + 'hex': 'str', + 'name': 'str', + 'value': 'float' + } + + attribute_map = { + 'id': 'id', + 'type': 'type', + 'hex': 'hex', + 'name': 'name', + 'value': 'value' + } + + def __init__(self, id=None, type=None, hex=None, name=None, value=None): # noqa: E501,D401,D403 + """DashboardColor - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._type = None + self._hex = None + self._name = None + self._value = None + self.discriminator = None + + self.id = id + self.type = type + self.hex = hex + self.name = name + self.value = value + + @property + def id(self): + """Get the id of this DashboardColor. + + The unique ID of the view color. + + :return: The id of this DashboardColor. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this DashboardColor. + + The unique ID of the view color. + + :param id: The id of this DashboardColor. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def type(self): + """Get the type of this DashboardColor. + + Type is how the color is used. + + :return: The type of this DashboardColor. 
+ :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DashboardColor. + + Type is how the color is used. + + :param type: The type of this DashboardColor. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def hex(self): + """Get the hex of this DashboardColor. + + The hex number of the color + + :return: The hex of this DashboardColor. + :rtype: str + """ # noqa: E501 + return self._hex + + @hex.setter + def hex(self, hex): + """Set the hex of this DashboardColor. + + The hex number of the color + + :param hex: The hex of this DashboardColor. + :type: str + """ # noqa: E501 + if hex is None: + raise ValueError("Invalid value for `hex`, must not be `None`") # noqa: E501 + if hex is not None and len(hex) > 7: + raise ValueError("Invalid value for `hex`, length must be less than or equal to `7`") # noqa: E501 + if hex is not None and len(hex) < 7: + raise ValueError("Invalid value for `hex`, length must be greater than or equal to `7`") # noqa: E501 + self._hex = hex + + @property + def name(self): + """Get the name of this DashboardColor. + + The user-facing name of the hex color. + + :return: The name of this DashboardColor. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this DashboardColor. + + The user-facing name of the hex color. + + :param name: The name of this DashboardColor. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def value(self): + """Get the value of this DashboardColor. + + The data value mapped to this color. + + :return: The value of this DashboardColor. + :rtype: float + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this DashboardColor. + + The data value mapped to this color. + + :param value: The value of this DashboardColor. 
+ :type: float + """ # noqa: E501 + if value is None: + raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DashboardColor): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_query.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_query.py new file mode 100644 index 0000000..e23c61f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_query.py @@ -0,0 +1,180 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DashboardQuery(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'text': 'str', + 'edit_mode': 'QueryEditMode', + 'name': 'str', + 'builder_config': 'BuilderConfig' + } + + attribute_map = { + 'text': 'text', + 'edit_mode': 'editMode', + 'name': 'name', + 'builder_config': 'builderConfig' + } + + def __init__(self, text=None, edit_mode=None, name=None, builder_config=None): # noqa: E501,D401,D403 + """DashboardQuery - a model defined in OpenAPI.""" # noqa: E501 + self._text = None + self._edit_mode = None + self._name = None + self._builder_config = None + self.discriminator = None + + if text is not None: + self.text = text + if edit_mode is not None: + self.edit_mode = edit_mode + if name is not None: + self.name = name + if builder_config is not None: + self.builder_config = builder_config + + @property + def text(self): + """Get the text of this DashboardQuery. + + The text of the Flux query. + + :return: The text of this DashboardQuery. + :rtype: str + """ # noqa: E501 + return self._text + + @text.setter + def text(self, text): + """Set the text of this DashboardQuery. + + The text of the Flux query. + + :param text: The text of this DashboardQuery. + :type: str + """ # noqa: E501 + self._text = text + + @property + def edit_mode(self): + """Get the edit_mode of this DashboardQuery. 
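+
+        For context (editor's sketch; the raw string stands in for a
+        ``QueryEditMode`` value)::
+
+            q = DashboardQuery(text='from(bucket: "b") |> range(start: -1h)',
+                               edit_mode="advanced")
+            q.edit_mode  # -> 'advanced'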
+ + :return: The edit_mode of this DashboardQuery. + :rtype: QueryEditMode + """ # noqa: E501 + return self._edit_mode + + @edit_mode.setter + def edit_mode(self, edit_mode): + """Set the edit_mode of this DashboardQuery. + + :param edit_mode: The edit_mode of this DashboardQuery. + :type: QueryEditMode + """ # noqa: E501 + self._edit_mode = edit_mode + + @property + def name(self): + """Get the name of this DashboardQuery. + + :return: The name of this DashboardQuery. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this DashboardQuery. + + :param name: The name of this DashboardQuery. + :type: str + """ # noqa: E501 + self._name = name + + @property + def builder_config(self): + """Get the builder_config of this DashboardQuery. + + :return: The builder_config of this DashboardQuery. + :rtype: BuilderConfig + """ # noqa: E501 + return self._builder_config + + @builder_config.setter + def builder_config(self, builder_config): + """Set the builder_config of this DashboardQuery. + + :param builder_config: The builder_config of this DashboardQuery. + :type: BuilderConfig + """ # noqa: E501 + self._builder_config = builder_config + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DashboardQuery): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_with_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_with_view_properties.py new file mode 100644 index 0000000..08a78d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboard_with_view_properties.py @@ -0,0 +1,209 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest + + +class DashboardWithViewProperties(CreateDashboardRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
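+
+    Illustrative usage (editor's sketch; ``cell`` is a hypothetical
+    ``CellWithViewProperties``)::
+
+        dash = DashboardWithViewProperties(org_id="0000000000000001",
+                                           name="Ops", cells=[cell])
+        dash.to_dict()  # each cell is serialized via its own to_dict()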
+ """ + openapi_types = { + 'links': 'object', + 'id': 'str', + 'meta': 'object', + 'cells': 'list[CellWithViewProperties]', + 'labels': 'list[Label]', + 'org_id': 'str', + 'name': 'str', + 'description': 'str' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'meta': 'meta', + 'cells': 'cells', + 'labels': 'labels', + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description' + } + + def __init__(self, links=None, id=None, meta=None, cells=None, labels=None, org_id=None, name=None, description=None): # noqa: E501,D401,D403 + """DashboardWithViewProperties - a model defined in OpenAPI.""" # noqa: E501 + CreateDashboardRequest.__init__(self, org_id=org_id, name=name, description=description) # noqa: E501 + + self._links = None + self._id = None + self._meta = None + self._cells = None + self._labels = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + if meta is not None: + self.meta = meta + if cells is not None: + self.cells = cells + if labels is not None: + self.labels = labels + + @property + def links(self): + """Get the links of this DashboardWithViewProperties. + + :return: The links of this DashboardWithViewProperties. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this DashboardWithViewProperties. + + :param links: The links of this DashboardWithViewProperties. + :type: object + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this DashboardWithViewProperties. + + :return: The id of this DashboardWithViewProperties. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this DashboardWithViewProperties. + + :param id: The id of this DashboardWithViewProperties. + :type: str + """ # noqa: E501 + self._id = id + + @property + def meta(self): + """Get the meta of this DashboardWithViewProperties. + + :return: The meta of this DashboardWithViewProperties. + :rtype: object + """ # noqa: E501 + return self._meta + + @meta.setter + def meta(self, meta): + """Set the meta of this DashboardWithViewProperties. + + :param meta: The meta of this DashboardWithViewProperties. + :type: object + """ # noqa: E501 + self._meta = meta + + @property + def cells(self): + """Get the cells of this DashboardWithViewProperties. + + :return: The cells of this DashboardWithViewProperties. + :rtype: list[CellWithViewProperties] + """ # noqa: E501 + return self._cells + + @cells.setter + def cells(self, cells): + """Set the cells of this DashboardWithViewProperties. + + :param cells: The cells of this DashboardWithViewProperties. + :type: list[CellWithViewProperties] + """ # noqa: E501 + self._cells = cells + + @property + def labels(self): + """Get the labels of this DashboardWithViewProperties. + + :return: The labels of this DashboardWithViewProperties. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this DashboardWithViewProperties. + + :param labels: The labels of this DashboardWithViewProperties. 
+ :type: list[Label] + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DashboardWithViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboards.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboards.py new file mode 100644 index 0000000..e15de92 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dashboards.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Dashboards(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'Links', + 'dashboards': 'list[Dashboard]' + } + + attribute_map = { + 'links': 'links', + 'dashboards': 'dashboards' + } + + def __init__(self, links=None, dashboards=None): # noqa: E501,D401,D403 + """Dashboards - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._dashboards = None + self.discriminator = None + + if links is not None: + self.links = links + if dashboards is not None: + self.dashboards = dashboards + + @property + def links(self): + """Get the links of this Dashboards. + + :return: The links of this Dashboards. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Dashboards. + + :param links: The links of this Dashboards. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def dashboards(self): + """Get the dashboards of this Dashboards. + + :return: The dashboards of this Dashboards. + :rtype: list[Dashboard] + """ # noqa: E501 + return self._dashboards + + @dashboards.setter + def dashboards(self, dashboards): + """Set the dashboards of this Dashboards. + + :param dashboards: The dashboards of this Dashboards. 
+ :type: list[Dashboard] + """ # noqa: E501 + self._dashboards = dashboards + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Dashboards): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/date_time_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/date_time_literal.py new file mode 100644 index 0000000..ce2280f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/date_time_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class DateTimeLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'datetime' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """DateTimeLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this DateTimeLiteral. + + Type of AST node + + :return: The type of this DateTimeLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DateTimeLiteral. + + Type of AST node + + :param type: The type of this DateTimeLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this DateTimeLiteral. + + :return: The value of this DateTimeLiteral. + :rtype: datetime + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this DateTimeLiteral. + + :param value: The value of this DateTimeLiteral. 
+ :type: datetime + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DateTimeLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbr_ps.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbr_ps.py new file mode 100644 index 0000000..27617a9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbr_ps.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DBRPs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'content': 'list[DBRP]' + } + + attribute_map = { + 'content': 'content' + } + + def __init__(self, content=None): # noqa: E501,D401,D403 + """DBRPs - a model defined in OpenAPI.""" # noqa: E501 + self._content = None + self.discriminator = None + + if content is not None: + self.content = content + + @property + def content(self): + """Get the content of this DBRPs. + + :return: The content of this DBRPs. + :rtype: list[DBRP] + """ # noqa: E501 + return self._content + + @content.setter + def content(self, content): + """Set the content of this DBRPs. + + :param content: The content of this DBRPs. 
+ :type: list[DBRP] + """ # noqa: E501 + self._content = content + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DBRPs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp.py new file mode 100644 index 0000000..1bb74ec --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp.py @@ -0,0 +1,302 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DBRP(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'bucket_id': 'str', + 'database': 'str', + 'retention_policy': 'str', + 'default': 'bool', + 'virtual': 'bool', + 'links': 'Links' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'bucket_id': 'bucketID', + 'database': 'database', + 'retention_policy': 'retention_policy', + 'default': 'default', + 'virtual': 'virtual', + 'links': 'links' + } + + def __init__(self, id=None, org_id=None, bucket_id=None, database=None, retention_policy=None, default=None, virtual=None, links=None): # noqa: E501,D401,D403 + """DBRP - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._bucket_id = None + self._database = None + self._retention_policy = None + self._default = None + self._virtual = None + self._links = None + self.discriminator = None + + self.id = id + self.org_id = org_id + self.bucket_id = bucket_id + self.database = database + self.retention_policy = retention_policy + self.default = default + if virtual is not None: + self.virtual = virtual + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this DBRP. + + The resource ID that InfluxDB uses to uniquely identify the database retention policy (DBRP) mapping. + + :return: The id of this DBRP. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this DBRP. 
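A short sketch of the DBRPs container completed above; `content` holds a list of DBRP mappings (empty here for brevity):

from influxdb_client.domain.dbr_ps import DBRPs

dbrps = DBRPs(content=[])
print(dbrps.to_dict())  # {'content': []}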
+ + The resource ID that InfluxDB uses to uniquely identify the database retention policy (DBRP) mapping. + + :param id: The id of this DBRP. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this DBRP. + + An organization ID. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :return: The org_id of this DBRP. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this DBRP. + + An organization ID. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :param org_id: The org_id of this DBRP. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def bucket_id(self): + """Get the bucket_id of this DBRP. + + A bucket ID. Identifies the bucket used as the target for the translation. + + :return: The bucket_id of this DBRP. + :rtype: str + """ # noqa: E501 + return self._bucket_id + + @bucket_id.setter + def bucket_id(self, bucket_id): + """Set the bucket_id of this DBRP. + + A bucket ID. Identifies the bucket used as the target for the translation. + + :param bucket_id: The bucket_id of this DBRP. + :type: str + """ # noqa: E501 + if bucket_id is None: + raise ValueError("Invalid value for `bucket_id`, must not be `None`") # noqa: E501 + self._bucket_id = bucket_id + + @property + def database(self): + """Get the database of this DBRP. + + A database name. Identifies the InfluxDB v1 database. + + :return: The database of this DBRP. + :rtype: str + """ # noqa: E501 + return self._database + + @database.setter + def database(self, database): + """Set the database of this DBRP. + + A database name. Identifies the InfluxDB v1 database. + + :param database: The database of this DBRP. + :type: str + """ # noqa: E501 + if database is None: + raise ValueError("Invalid value for `database`, must not be `None`") # noqa: E501 + self._database = database + + @property + def retention_policy(self): + """Get the retention_policy of this DBRP. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :return: The retention_policy of this DBRP. + :rtype: str + """ # noqa: E501 + return self._retention_policy + + @retention_policy.setter + def retention_policy(self, retention_policy): + """Set the retention_policy of this DBRP. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :param retention_policy: The retention_policy of this DBRP. + :type: str + """ # noqa: E501 + if retention_policy is None: + raise ValueError("Invalid value for `retention_policy`, must not be `None`") # noqa: E501 + self._retention_policy = retention_policy + + @property + def default(self): + """Get the default of this DBRP. + + If set to `true`, this DBRP mapping is the default retention policy for the database (specified by the `database` property's value). + + :return: The default of this DBRP. 
+ :rtype: bool + """ # noqa: E501 + return self._default + + @default.setter + def default(self, default): + """Set the default of this DBRP. + + If set to `true`, this DBRP mapping is the default retention policy for the database (specified by the `database` property's value). + + :param default: The default of this DBRP. + :type: bool + """ # noqa: E501 + if default is None: + raise ValueError("Invalid value for `default`, must not be `None`") # noqa: E501 + self._default = default + + @property + def virtual(self): + """Get the virtual of this DBRP. + + Indicates an autogenerated, virtual mapping based on the bucket name. Currently only available in OSS. + + :return: The virtual of this DBRP. + :rtype: bool + """ # noqa: E501 + return self._virtual + + @virtual.setter + def virtual(self, virtual): + """Set the virtual of this DBRP. + + Indicates an autogenerated, virtual mapping based on the bucket name. Currently only available in OSS. + + :param virtual: The virtual of this DBRP. + :type: bool + """ # noqa: E501 + self._virtual = virtual + + @property + def links(self): + """Get the links of this DBRP. + + :return: The links of this DBRP. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this DBRP. + + :param links: The links of this DBRP. + :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DBRP): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_create.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_create.py new file mode 100644 index 0000000..e69183f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_create.py @@ -0,0 +1,249 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DBRPCreate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
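The DBRP model just completed assigns its required fields unconditionally in __init__, so omitting one raises at construction time. A sketch with illustrative IDs:

from influxdb_client.domain.dbrp import DBRP

mapping = DBRP(
    id="dbrp-id",              # illustrative placeholder values
    org_id="org-id",
    bucket_id="bucket-id",
    database="telegraf",
    retention_policy="autogen",
    default=True,
)

# Each required setter rejects None, so a missing field fails fast:
try:
    DBRP(org_id="org-id", bucket_id="bucket-id", database="telegraf",
         retention_policy="autogen", default=True)
except ValueError as exc:
    print(exc)  # Invalid value for `id`, must not be `None`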
+ """ + openapi_types = { + 'org': 'str', + 'org_id': 'str', + 'bucket_id': 'str', + 'database': 'str', + 'retention_policy': 'str', + 'default': 'bool' + } + + attribute_map = { + 'org': 'org', + 'org_id': 'orgID', + 'bucket_id': 'bucketID', + 'database': 'database', + 'retention_policy': 'retention_policy', + 'default': 'default' + } + + def __init__(self, org=None, org_id=None, bucket_id=None, database=None, retention_policy=None, default=None): # noqa: E501,D401,D403 + """DBRPCreate - a model defined in OpenAPI.""" # noqa: E501 + self._org = None + self._org_id = None + self._bucket_id = None + self._database = None + self._retention_policy = None + self._default = None + self.discriminator = None + + if org is not None: + self.org = org + if org_id is not None: + self.org_id = org_id + self.bucket_id = bucket_id + self.database = database + self.retention_policy = retention_policy + if default is not None: + self.default = default + + @property + def org(self): + """Get the org of this DBRPCreate. + + An organization name. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :return: The org of this DBRPCreate. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this DBRPCreate. + + An organization name. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :param org: The org of this DBRPCreate. + :type: str + """ # noqa: E501 + self._org = org + + @property + def org_id(self): + """Get the org_id of this DBRPCreate. + + An organization ID. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :return: The org_id of this DBRPCreate. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this DBRPCreate. + + An organization ID. Identifies the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) that owns the mapping. + + :param org_id: The org_id of this DBRPCreate. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def bucket_id(self): + """Get the bucket_id of this DBRPCreate. + + A bucket ID. Identifies the bucket used as the target for the translation. + + :return: The bucket_id of this DBRPCreate. + :rtype: str + """ # noqa: E501 + return self._bucket_id + + @bucket_id.setter + def bucket_id(self, bucket_id): + """Set the bucket_id of this DBRPCreate. + + A bucket ID. Identifies the bucket used as the target for the translation. + + :param bucket_id: The bucket_id of this DBRPCreate. + :type: str + """ # noqa: E501 + if bucket_id is None: + raise ValueError("Invalid value for `bucket_id`, must not be `None`") # noqa: E501 + self._bucket_id = bucket_id + + @property + def database(self): + """Get the database of this DBRPCreate. + + A database name. Identifies the InfluxDB v1 database. + + :return: The database of this DBRPCreate. + :rtype: str + """ # noqa: E501 + return self._database + + @database.setter + def database(self, database): + """Set the database of this DBRPCreate. + + A database name. Identifies the InfluxDB v1 database. + + :param database: The database of this DBRPCreate. 
+ :type: str + """ # noqa: E501 + if database is None: + raise ValueError("Invalid value for `database`, must not be `None`") # noqa: E501 + self._database = database + + @property + def retention_policy(self): + """Get the retention_policy of this DBRPCreate. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :return: The retention_policy of this DBRPCreate. + :rtype: str + """ # noqa: E501 + return self._retention_policy + + @retention_policy.setter + def retention_policy(self, retention_policy): + """Set the retention_policy of this DBRPCreate. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :param retention_policy: The retention_policy of this DBRPCreate. + :type: str + """ # noqa: E501 + if retention_policy is None: + raise ValueError("Invalid value for `retention_policy`, must not be `None`") # noqa: E501 + self._retention_policy = retention_policy + + @property + def default(self): + """Get the default of this DBRPCreate. + + Set to `true` to use this DBRP mapping as the default retention policy for the database (specified by the `database` property's value). + + :return: The default of this DBRPCreate. + :rtype: bool + """ # noqa: E501 + return self._default + + @default.setter + def default(self, default): + """Set the default of this DBRPCreate. + + Set to `true` to use this DBRP mapping as the default retention policy for the database (specified by the `database` property's value). + + :param default: The default of this DBRPCreate. + :type: bool + """ # noqa: E501 + self._default = default + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DBRPCreate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_get.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_get.py new file mode 100644 index 0000000..375f0a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_get.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DBRPGet(object): + """NOTE: This class is auto generated by OpenAPI Generator. 
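DBRPCreate, completed above, requires only bucket_id, database, and retention_policy; org/org_id and default are optional. A sketch of building the request body with illustrative values:

from influxdb_client.domain.dbrp_create import DBRPCreate

payload = DBRPCreate(
    org_id="org-id",
    bucket_id="bucket-id",
    database="telegraf",
    retention_policy="autogen",
    default=True,
)
# to_dict() keys use the Python attribute names; attribute_map supplies the
# JSON names (e.g. org_id -> orgID) when the client serializes the request.
print(payload.to_dict())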
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'content': 'DBRP' + } + + attribute_map = { + 'content': 'content' + } + + def __init__(self, content=None): # noqa: E501,D401,D403 + """DBRPGet - a model defined in OpenAPI.""" # noqa: E501 + self._content = None + self.discriminator = None + + if content is not None: + self.content = content + + @property + def content(self): + """Get the content of this DBRPGet. + + :return: The content of this DBRPGet. + :rtype: DBRP + """ # noqa: E501 + return self._content + + @content.setter + def content(self, content): + """Set the content of this DBRPGet. + + :param content: The content of this DBRPGet. + :type: DBRP + """ # noqa: E501 + self._content = content + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DBRPGet): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_update.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_update.py new file mode 100644 index 0000000..4b0c5ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dbrp_update.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DBRPUpdate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
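DBRPGet, defined below, simply wraps a single DBRP under `content`, and to_dict() recurses into it. A sketch with illustrative IDs:

from influxdb_client.domain.dbrp import DBRP
from influxdb_client.domain.dbrp_get import DBRPGet

resp = DBRPGet(content=DBRP(id="dbrp-id", org_id="org-id",
                            bucket_id="bucket-id", database="telegraf",
                            retention_policy="autogen", default=True))
print(resp.to_dict()["content"]["database"])  # telegraf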
+ """ + openapi_types = { + 'retention_policy': 'str', + 'default': 'bool' + } + + attribute_map = { + 'retention_policy': 'retention_policy', + 'default': 'default' + } + + def __init__(self, retention_policy=None, default=None): # noqa: E501,D401,D403 + """DBRPUpdate - a model defined in OpenAPI.""" # noqa: E501 + self._retention_policy = None + self._default = None + self.discriminator = None + + if retention_policy is not None: + self.retention_policy = retention_policy + if default is not None: + self.default = default + + @property + def retention_policy(self): + """Get the retention_policy of this DBRPUpdate. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :return: The retention_policy of this DBRPUpdate. + :rtype: str + """ # noqa: E501 + return self._retention_policy + + @retention_policy.setter + def retention_policy(self, retention_policy): + """Set the retention_policy of this DBRPUpdate. + + A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) name. Identifies the InfluxDB v1 retention policy mapping. + + :param retention_policy: The retention_policy of this DBRPUpdate. + :type: str + """ # noqa: E501 + self._retention_policy = retention_policy + + @property + def default(self): + """Get the default of this DBRPUpdate. + + Set to `true` to use this DBRP mapping as the default retention policy for the database (specified by the `database` property's value). To remove the default mapping, set to `false`. + + :return: The default of this DBRPUpdate. + :rtype: bool + """ # noqa: E501 + return self._default + + @default.setter + def default(self, default): + """Set the default of this DBRPUpdate. + + Set to `true` to use this DBRP mapping as the default retention policy for the database (specified by the `database` property's value). To remove the default mapping, set to `false`. + + :param default: The default of this DBRPUpdate. + :type: bool + """ # noqa: E501 + self._default = default + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DBRPUpdate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/deadman_check.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/deadman_check.py new file mode 100644 index 0000000..81e368f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/deadman_check.py @@ -0,0 +1,354 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + + +class DeadmanCheck(CheckDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'time_since': 'str', + 'stale_time': 'str', + 'report_zero': 'bool', + 'level': 'CheckStatusLevel', + 'every': 'str', + 'offset': 'str', + 'tags': 'list[object]', + 'status_message_template': 'str', + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'query': 'DashboardQuery', + 'status': 'TaskStatusType', + 'description': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'labels': 'list[Label]', + 'links': 'CheckBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'time_since': 'timeSince', + 'stale_time': 'staleTime', + 'report_zero': 'reportZero', + 'level': 'level', + 'every': 'every', + 'offset': 'offset', + 'tags': 'tags', + 'status_message_template': 'statusMessageTemplate', + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'query': 'query', + 'status': 'status', + 'description': 'description', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="deadman", time_since=None, stale_time=None, report_zero=None, level=None, every=None, offset=None, tags=None, status_message_template=None, id=None, name=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, query=None, status=None, description=None, latest_completed=None, last_run_status=None, last_run_error=None, labels=None, links=None): # noqa: E501,D401,D403 + """DeadmanCheck - a model defined in OpenAPI.""" # noqa: E501 + CheckDiscriminator.__init__(self, id=id, name=name, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, query=query, status=status, description=description, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, labels=labels, links=links) # noqa: E501 + + self._type = None + self._time_since = None + self._stale_time = None + self._report_zero = None + self._level = None + self._every = None + self._offset = None + self._tags = None + self._status_message_template = None + self.discriminator = None + + self.type = type + if time_since is not None: + self.time_since = time_since + if stale_time is not None: + self.stale_time = stale_time + if report_zero is not None: + self.report_zero = report_zero + if level is not None: + self.level = level + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if tags is not None: + self.tags = tags + if status_message_template is not None: + 
self.status_message_template = status_message_template + + @property + def type(self): + """Get the type of this DeadmanCheck. + + :return: The type of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DeadmanCheck. + + :param type: The type of this DeadmanCheck. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def time_since(self): + """Get the time_since of this DeadmanCheck. + + String duration before deadman triggers. + + :return: The time_since of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._time_since + + @time_since.setter + def time_since(self, time_since): + """Set the time_since of this DeadmanCheck. + + String duration before deadman triggers. + + :param time_since: The time_since of this DeadmanCheck. + :type: str + """ # noqa: E501 + self._time_since = time_since + + @property + def stale_time(self): + """Get the stale_time of this DeadmanCheck. + + String duration for time that a series is considered stale and should not trigger deadman. + + :return: The stale_time of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._stale_time + + @stale_time.setter + def stale_time(self, stale_time): + """Set the stale_time of this DeadmanCheck. + + String duration for time that a series is considered stale and should not trigger deadman. + + :param stale_time: The stale_time of this DeadmanCheck. + :type: str + """ # noqa: E501 + self._stale_time = stale_time + + @property + def report_zero(self): + """Get the report_zero of this DeadmanCheck. + + If only zero values reported since time, trigger an alert + + :return: The report_zero of this DeadmanCheck. + :rtype: bool + """ # noqa: E501 + return self._report_zero + + @report_zero.setter + def report_zero(self, report_zero): + """Set the report_zero of this DeadmanCheck. + + If only zero values reported since time, trigger an alert + + :param report_zero: The report_zero of this DeadmanCheck. + :type: bool + """ # noqa: E501 + self._report_zero = report_zero + + @property + def level(self): + """Get the level of this DeadmanCheck. + + :return: The level of this DeadmanCheck. + :rtype: CheckStatusLevel + """ # noqa: E501 + return self._level + + @level.setter + def level(self, level): + """Set the level of this DeadmanCheck. + + :param level: The level of this DeadmanCheck. + :type: CheckStatusLevel + """ # noqa: E501 + self._level = level + + @property + def every(self): + """Get the every of this DeadmanCheck. + + Check repetition interval. + + :return: The every of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this DeadmanCheck. + + Check repetition interval. + + :param every: The every of this DeadmanCheck. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this DeadmanCheck. + + Duration to delay after the schedule, before executing check. + + :return: The offset of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this DeadmanCheck. + + Duration to delay after the schedule, before executing check. + + :param offset: The offset of this DeadmanCheck. 
+ :type: str + """ # noqa: E501 + self._offset = offset + + @property + def tags(self): + """Get the tags of this DeadmanCheck. + + List of tags to write to each status. + + :return: The tags of this DeadmanCheck. + :rtype: list[object] + """ # noqa: E501 + return self._tags + + @tags.setter + def tags(self, tags): + """Set the tags of this DeadmanCheck. + + List of tags to write to each status. + + :param tags: The tags of this DeadmanCheck. + :type: list[object] + """ # noqa: E501 + self._tags = tags + + @property + def status_message_template(self): + """Get the status_message_template of this DeadmanCheck. + + The template used to generate and write a status message. + + :return: The status_message_template of this DeadmanCheck. + :rtype: str + """ # noqa: E501 + return self._status_message_template + + @status_message_template.setter + def status_message_template(self, status_message_template): + """Set the status_message_template of this DeadmanCheck. + + The template used to generate and write a status message. + + :param status_message_template: The status_message_template of this DeadmanCheck. + :type: str + """ # noqa: E501 + self._status_message_template = status_message_template + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DeadmanCheck): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/decimal_places.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/decimal_places.py new file mode 100644 index 0000000..7a855a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/decimal_places.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DecimalPlaces(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
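A sketch constructing the DeadmanCheck defined above. `type` defaults to "deadman" and its setter rejects None; the remaining values are illustrative, and this assumes the inherited CheckDiscriminator/CheckBase models (not shown in this hunk) accept these kwargs, with name, org_id, and query supplied in case the base marks them required:

from influxdb_client.domain.dashboard_query import DashboardQuery
from influxdb_client.domain.deadman_check import DeadmanCheck

check = DeadmanCheck(
    name="cpu-deadman",                 # illustrative values
    org_id="org-id",
    query=DashboardQuery(text='from(bucket:"telegraf") |> range(start: -5m)'),
    every="1m",
    offset="5s",
    time_since="90s",
    stale_time="10m",
    report_zero=False,
    status_message_template="Check: ${ r._check_name } is: ${ r._level }",
)
print(check.type)  # deadman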
+ """ + openapi_types = { + 'is_enforced': 'bool', + 'digits': 'int' + } + + attribute_map = { + 'is_enforced': 'isEnforced', + 'digits': 'digits' + } + + def __init__(self, is_enforced=None, digits=None): # noqa: E501,D401,D403 + """DecimalPlaces - a model defined in OpenAPI.""" # noqa: E501 + self._is_enforced = None + self._digits = None + self.discriminator = None + + if is_enforced is not None: + self.is_enforced = is_enforced + if digits is not None: + self.digits = digits + + @property + def is_enforced(self): + """Get the is_enforced of this DecimalPlaces. + + Indicates whether decimal point setting should be enforced + + :return: The is_enforced of this DecimalPlaces. + :rtype: bool + """ # noqa: E501 + return self._is_enforced + + @is_enforced.setter + def is_enforced(self, is_enforced): + """Set the is_enforced of this DecimalPlaces. + + Indicates whether decimal point setting should be enforced + + :param is_enforced: The is_enforced of this DecimalPlaces. + :type: bool + """ # noqa: E501 + self._is_enforced = is_enforced + + @property + def digits(self): + """Get the digits of this DecimalPlaces. + + The number of digits after decimal to display + + :return: The digits of this DecimalPlaces. + :rtype: int + """ # noqa: E501 + return self._digits + + @digits.setter + def digits(self, digits): + """Set the digits of this DecimalPlaces. + + The number of digits after decimal to display + + :param digits: The digits of this DecimalPlaces. + :type: int + """ # noqa: E501 + self._digits = digits + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DecimalPlaces): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/delete_predicate_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/delete_predicate_request.py new file mode 100644 index 0000000..1320291 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/delete_predicate_request.py @@ -0,0 +1,167 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DeletePredicateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'start': 'datetime', + 'stop': 'datetime', + 'predicate': 'str' + } + + attribute_map = { + 'start': 'start', + 'stop': 'stop', + 'predicate': 'predicate' + } + + def __init__(self, start=None, stop=None, predicate=None): # noqa: E501,D401,D403 + """DeletePredicateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._start = None + self._stop = None + self._predicate = None + self.discriminator = None + + self.start = start + self.stop = stop + if predicate is not None: + self.predicate = predicate + + @property + def start(self): + """Get the start of this DeletePredicateRequest. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). The earliest time to delete from. + + :return: The start of this DeletePredicateRequest. + :rtype: datetime + """ # noqa: E501 + return self._start + + @start.setter + def start(self, start): + """Set the start of this DeletePredicateRequest. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). The earliest time to delete from. + + :param start: The start of this DeletePredicateRequest. + :type: datetime + """ # noqa: E501 + if start is None: + raise ValueError("Invalid value for `start`, must not be `None`") # noqa: E501 + self._start = start + + @property + def stop(self): + """Get the stop of this DeletePredicateRequest. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). The latest time to delete from. + + :return: The stop of this DeletePredicateRequest. + :rtype: datetime + """ # noqa: E501 + return self._stop + + @stop.setter + def stop(self, stop): + """Set the stop of this DeletePredicateRequest. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). The latest time to delete from. + + :param stop: The stop of this DeletePredicateRequest. + :type: datetime + """ # noqa: E501 + if stop is None: + raise ValueError("Invalid value for `stop`, must not be `None`") # noqa: E501 + self._stop = stop + + @property + def predicate(self): + """Get the predicate of this DeletePredicateRequest. + + An expression in [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). + + :return: The predicate of this DeletePredicateRequest. + :rtype: str + """ # noqa: E501 + return self._predicate + + @predicate.setter + def predicate(self, predicate): + """Set the predicate of this DeletePredicateRequest. + + An expression in [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). + + :param predicate: The predicate of this DeletePredicateRequest. 
+ :type: str + """ # noqa: E501 + self._predicate = predicate + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DeletePredicateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dialect.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dialect.py new file mode 100644 index 0000000..18bc753 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dialect.py @@ -0,0 +1,234 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Dialect(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'header': 'bool', + 'delimiter': 'str', + 'annotations': 'list[str]', + 'comment_prefix': 'str', + 'date_time_format': 'str' + } + + attribute_map = { + 'header': 'header', + 'delimiter': 'delimiter', + 'annotations': 'annotations', + 'comment_prefix': 'commentPrefix', + 'date_time_format': 'dateTimeFormat' + } + + def __init__(self, header=True, delimiter=',', annotations=None, comment_prefix='#', date_time_format='RFC3339'): # noqa: E501,D401,D403 + """Dialect - a model defined in OpenAPI.""" # noqa: E501 + self._header = None + self._delimiter = None + self._annotations = None + self._comment_prefix = None + self._date_time_format = None + self.discriminator = None + + if header is not None: + self.header = header + if delimiter is not None: + self.delimiter = delimiter + if annotations is not None: + self.annotations = annotations + if comment_prefix is not None: + self.comment_prefix = comment_prefix + if date_time_format is not None: + self.date_time_format = date_time_format + + @property + def header(self): + """Get the header of this Dialect. + + If true, the results contain a header row. + + :return: The header of this Dialect. + :rtype: bool + """ # noqa: E501 + return self._header + + @header.setter + def header(self, header): + """Set the header of this Dialect. + + If true, the results contain a header row. 
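In the DeletePredicateRequest just completed, start and stop are required datetimes while predicate is an optional delete-predicate expression. A sketch with an illustrative measurement filter:

from datetime import datetime, timezone

from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest

req = DeletePredicateRequest(
    start=datetime(2022, 1, 1, tzinfo=timezone.utc),
    stop=datetime(2022, 1, 2, tzinfo=timezone.utc),
    predicate='_measurement="cpu"',
)

# Omitting either timestamp raises, mirroring the setters above:
try:
    DeletePredicateRequest(start=None, stop=datetime.now(timezone.utc))
except ValueError as exc:
    print(exc)  # Invalid value for `start`, must not be `None`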
+ + :param header: The header of this Dialect. + :type: bool + """ # noqa: E501 + self._header = header + + @property + def delimiter(self): + """Get the delimiter of this Dialect. + + The separator used between cells. Default is a comma (`,`). + + :return: The delimiter of this Dialect. + :rtype: str + """ # noqa: E501 + return self._delimiter + + @delimiter.setter + def delimiter(self, delimiter): + """Set the delimiter of this Dialect. + + The separator used between cells. Default is a comma (`,`). + + :param delimiter: The delimiter of this Dialect. + :type: str + """ # noqa: E501 + if delimiter is not None and len(delimiter) > 1: + raise ValueError("Invalid value for `delimiter`, length must be less than or equal to `1`") # noqa: E501 + if delimiter is not None and len(delimiter) < 1: + raise ValueError("Invalid value for `delimiter`, length must be greater than or equal to `1`") # noqa: E501 + self._delimiter = delimiter + + @property + def annotations(self): + """Get the annotations of this Dialect. + + Annotation rows to include in the results. An _annotation_ is metadata associated with an object (column) in the data model. #### Related guides - See [Annotated CSV annotations](https://docs.influxdata.com/influxdb/latest/reference/syntax/annotated-csv/#annotations) for examples and more information. For more information about **annotations** in tabular data, see [W3 metadata vocabulary for tabular data](https://www.w3.org/TR/2015/REC-tabular-data-model-20151217/#columns). + + :return: The annotations of this Dialect. + :rtype: list[str] + """ # noqa: E501 + return self._annotations + + @annotations.setter + def annotations(self, annotations): + """Set the annotations of this Dialect. + + Annotation rows to include in the results. An _annotation_ is metadata associated with an object (column) in the data model. #### Related guides - See [Annotated CSV annotations](https://docs.influxdata.com/influxdb/latest/reference/syntax/annotated-csv/#annotations) for examples and more information. For more information about **annotations** in tabular data, see [W3 metadata vocabulary for tabular data](https://www.w3.org/TR/2015/REC-tabular-data-model-20151217/#columns). + + :param annotations: The annotations of this Dialect. + :type: list[str] + """ # noqa: E501 + allowed_values = ["group", "datatype", "default"] # noqa: E501 + if not set(annotations).issubset(set(allowed_values)): + raise ValueError( + "Invalid values for `annotations` [{0}], must be a subset of [{1}]" # noqa: E501 + .format(", ".join(map(str, set(annotations) - set(allowed_values))), # noqa: E501 + ", ".join(map(str, allowed_values))) + ) + self._annotations = annotations + + @property + def comment_prefix(self): + """Get the comment_prefix of this Dialect. + + The character prefixed to comment strings. Default is a number sign (`#`). + + :return: The comment_prefix of this Dialect. + :rtype: str + """ # noqa: E501 + return self._comment_prefix + + @comment_prefix.setter + def comment_prefix(self, comment_prefix): + """Set the comment_prefix of this Dialect. + + The character prefixed to comment strings. Default is a number sign (`#`). + + :param comment_prefix: The comment_prefix of this Dialect. 
+ :type: str + """ # noqa: E501 + if comment_prefix is not None and len(comment_prefix) > 1: + raise ValueError("Invalid value for `comment_prefix`, length must be less than or equal to `1`") # noqa: E501 + if comment_prefix is not None and len(comment_prefix) < 0: + raise ValueError("Invalid value for `comment_prefix`, length must be greater than or equal to `0`") # noqa: E501 + self._comment_prefix = comment_prefix + + @property + def date_time_format(self): + """Get the date_time_format of this Dialect. + + The format for timestamps in results. Default is [`RFC3339` date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp). To include nanoseconds in timestamps, use `RFC3339Nano`. #### Example formatted date/time values | Format | Value | |:------------|:----------------------------| | `RFC3339` | `"2006-01-02T15:04:05Z07:00"` | | `RFC3339Nano` | `"2006-01-02T15:04:05.999999999Z07:00"` | + + :return: The date_time_format of this Dialect. + :rtype: str + """ # noqa: E501 + return self._date_time_format + + @date_time_format.setter + def date_time_format(self, date_time_format): + """Set the date_time_format of this Dialect. + + The format for timestamps in results. Default is [`RFC3339` date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp). To include nanoseconds in timestamps, use `RFC3339Nano`. #### Example formatted date/time values | Format | Value | |:------------|:----------------------------| | `RFC3339` | `"2006-01-02T15:04:05Z07:00"` | | `RFC3339Nano` | `"2006-01-02T15:04:05.999999999Z07:00"` | + + :param date_time_format: The date_time_format of this Dialect. + :type: str + """ # noqa: E501 + self._date_time_format = date_time_format + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Dialect): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_expression.py new file mode 100644 index 0000000..f6c0242 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_expression.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
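The Dialect setters above enforce the delimiter and comment-prefix lengths and the allowed annotation names, so an invalid dialect fails at assignment time. A sketch:

from influxdb_client.domain.dialect import Dialect

dialect = Dialect(header=True, delimiter=",",
                  annotations=["group", "datatype", "default"],
                  date_time_format="RFC3339Nano")

try:
    Dialect(delimiter="||")  # longer than one character
except ValueError as exc:
    print(exc)

try:
    Dialect(annotations=["bogus"])  # not in {group, datatype, default}
except ValueError as exc:
    print(exc)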
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class DictExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'elements': 'list[DictItem]' + } + + attribute_map = { + 'type': 'type', + 'elements': 'elements' + } + + def __init__(self, type=None, elements=None): # noqa: E501,D401,D403 + """DictExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._elements = None + self.discriminator = None + + if type is not None: + self.type = type + if elements is not None: + self.elements = elements + + @property + def type(self): + """Get the type of this DictExpression. + + Type of AST node + + :return: The type of this DictExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DictExpression. + + Type of AST node + + :param type: The type of this DictExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def elements(self): + """Get the elements of this DictExpression. + + Elements of the dictionary + + :return: The elements of this DictExpression. + :rtype: list[DictItem] + """ # noqa: E501 + return self._elements + + @elements.setter + def elements(self, elements): + """Set the elements of this DictExpression. + + Elements of the dictionary + + :param elements: The elements of this DictExpression. + :type: list[DictItem] + """ # noqa: E501 + self._elements = elements + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DictExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_item.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_item.py new file mode 100644 index 0000000..77beac9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/dict_item.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class DictItem(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'key': 'Expression', + 'val': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'key': 'key', + 'val': 'val' + } + + def __init__(self, type=None, key=None, val=None): # noqa: E501,D401,D403 + """DictItem - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._key = None + self._val = None + self.discriminator = None + + if type is not None: + self.type = type + if key is not None: + self.key = key + if val is not None: + self.val = val + + @property + def type(self): + """Get the type of this DictItem. + + Type of AST node + + :return: The type of this DictItem. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DictItem. + + Type of AST node + + :param type: The type of this DictItem. + :type: str + """ # noqa: E501 + self._type = type + + @property + def key(self): + """Get the key of this DictItem. + + :return: The key of this DictItem. + :rtype: Expression + """ # noqa: E501 + return self._key + + @key.setter + def key(self, key): + """Set the key of this DictItem. + + :param key: The key of this DictItem. + :type: Expression + """ # noqa: E501 + self._key = key + + @property + def val(self): + """Get the val of this DictItem. + + :return: The val of this DictItem. + :rtype: Expression + """ # noqa: E501 + return self._val + + @val.setter + def val(self, val): + """Set the val of this DictItem. + + :param val: The val of this DictItem. + :type: Expression + """ # noqa: E501 + self._val = val + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DictItem): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration.py new file mode 100644 index 0000000..5104444 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
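DictExpression and DictItem compose into a small AST: each element pairs key/val Expressions. A sketch, assuming the sibling StringLiteral model generated from the same spec (not shown in this hunk):

from influxdb_client.domain.dict_expression import DictExpression
from influxdb_client.domain.dict_item import DictItem
from influxdb_client.domain.string_literal import StringLiteral  # sibling generated model

item = DictItem(type="DictItem",
                key=StringLiteral(type="StringLiteral", value="host"),
                val=StringLiteral(type="StringLiteral", value="server01"))
expr = DictExpression(type="DictExpression", elements=[item])
print(expr.to_dict())  # nested models serialize recursively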
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Duration(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'magnitude': 'int', + 'unit': 'str' + } + + attribute_map = { + 'type': 'type', + 'magnitude': 'magnitude', + 'unit': 'unit' + } + + def __init__(self, type=None, magnitude=None, unit=None): # noqa: E501,D401,D403 + """Duration - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._magnitude = None + self._unit = None + self.discriminator = None + + if type is not None: + self.type = type + if magnitude is not None: + self.magnitude = magnitude + if unit is not None: + self.unit = unit + + @property + def type(self): + """Get the type of this Duration. + + Type of AST node + + :return: The type of this Duration. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Duration. + + Type of AST node + + :param type: The type of this Duration. + :type: str + """ # noqa: E501 + self._type = type + + @property + def magnitude(self): + """Get the magnitude of this Duration. + + :return: The magnitude of this Duration. + :rtype: int + """ # noqa: E501 + return self._magnitude + + @magnitude.setter + def magnitude(self, magnitude): + """Set the magnitude of this Duration. + + :param magnitude: The magnitude of this Duration. + :type: int + """ # noqa: E501 + self._magnitude = magnitude + + @property + def unit(self): + """Get the unit of this Duration. + + :return: The unit of this Duration. + :rtype: str + """ # noqa: E501 + return self._unit + + @unit.setter + def unit(self, unit): + """Set the unit of this Duration. + + :param unit: The unit of this Duration. 
+ :type: str + """ # noqa: E501 + self._unit = unit + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Duration): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration_literal.py new file mode 100644 index 0000000..f6d6396 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/duration_literal.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class DurationLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'values': 'list[Duration]' + } + + attribute_map = { + 'type': 'type', + 'values': 'values' + } + + def __init__(self, type=None, values=None): # noqa: E501,D401,D403 + """DurationLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._values = None + self.discriminator = None + + if type is not None: + self.type = type + if values is not None: + self.values = values + + @property + def type(self): + """Get the type of this DurationLiteral. + + Type of AST node + + :return: The type of this DurationLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this DurationLiteral. + + Type of AST node + + :param type: The type of this DurationLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def values(self): + """Get the values of this DurationLiteral. + + Duration values + + :return: The values of this DurationLiteral. + :rtype: list[Duration] + """ # noqa: E501 + return self._values + + @values.setter + def values(self, values): + """Set the values of this DurationLiteral. + + Duration values + + :param values: The values of this DurationLiteral. 
+ :type: list[Duration] + """ # noqa: E501 + self._values = values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, DurationLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/error.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/error.py new file mode 100644 index 0000000..2a3655b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/error.py @@ -0,0 +1,193 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Error(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'code': 'str', + 'message': 'str', + 'op': 'str', + 'err': 'str' + } + + attribute_map = { + 'code': 'code', + 'message': 'message', + 'op': 'op', + 'err': 'err' + } + + def __init__(self, code=None, message=None, op=None, err=None): # noqa: E501,D401,D403 + """Error - a model defined in OpenAPI.""" # noqa: E501 + self._code = None + self._message = None + self._op = None + self._err = None + self.discriminator = None + + self.code = code + if message is not None: + self.message = message + if op is not None: + self.op = op + if err is not None: + self.err = err + + @property + def code(self): + """Get the code of this Error. + + code is the machine-readable error code. + + :return: The code of this Error. + :rtype: str + """ # noqa: E501 + return self._code + + @code.setter + def code(self, code): + """Set the code of this Error. + + code is the machine-readable error code. + + :param code: The code of this Error. + :type: str + """ # noqa: E501 + if code is None: + raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501 + self._code = code + + @property + def message(self): + """Get the message of this Error. + + Human-readable message. + + :return: The message of this Error. + :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this Error. + + Human-readable message. 
+ + :param message: The message of this Error. + :type: str + """ # noqa: E501 + self._message = message + + @property + def op(self): + """Get the op of this Error. + + Describes the logical code operation when the error occurred. Useful for debugging. + + :return: The op of this Error. + :rtype: str + """ # noqa: E501 + return self._op + + @op.setter + def op(self, op): + """Set the op of this Error. + + Describes the logical code operation when the error occurred. Useful for debugging. + + :param op: The op of this Error. + :type: str + """ # noqa: E501 + self._op = op + + @property + def err(self): + """Get the err of this Error. + + Stack of errors that occurred during processing of the request. Useful for debugging. + + :return: The err of this Error. + :rtype: str + """ # noqa: E501 + return self._err + + @err.setter + def err(self, err): + """Set the err of this Error. + + Stack of errors that occurred during processing of the request. Useful for debugging. + + :param err: The err of this Error. + :type: str + """ # noqa: E501 + self._err = err + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Error): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression.py new file mode 100644 index 0000000..9d449fd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.node import Node + + +class Expression(Node): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
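+
+    A minimal usage sketch (illustrative only; not part of the generated
+    file, and assumes the `influxdb_client` package is installed). The base
+    model declares no attributes of its own, so its dict form is empty:
+
+    >>> from influxdb_client.domain.expression import Expression
+    >>> Expression().to_dict()
+    {}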
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """Expression - a model defined in OpenAPI.""" # noqa: E501 + Node.__init__(self) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Expression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression_statement.py new file mode 100644 index 0000000..04dd216 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/expression_statement.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class ExpressionStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'expression': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'expression': 'expression' + } + + def __init__(self, type=None, expression=None): # noqa: E501,D401,D403 + """ExpressionStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._expression = None + self.discriminator = None + + if type is not None: + self.type = type + if expression is not None: + self.expression = expression + + @property + def type(self): + """Get the type of this ExpressionStatement. + + Type of AST node + + :return: The type of this ExpressionStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ExpressionStatement. + + Type of AST node + + :param type: The type of this ExpressionStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def expression(self): + """Get the expression of this ExpressionStatement. + + :return: The expression of this ExpressionStatement. 
+ :rtype: Expression + """ # noqa: E501 + return self._expression + + @expression.setter + def expression(self, expression): + """Set the expression of this ExpressionStatement. + + :param expression: The expression of this ExpressionStatement. + :type: Expression + """ # noqa: E501 + self._expression = expression + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ExpressionStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/field.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/field.py new file mode 100644 index 0000000..62bc3d1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/field.py @@ -0,0 +1,192 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Field(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'value': 'str', + 'type': 'str', + 'alias': 'str', + 'args': 'list[Field]' + } + + attribute_map = { + 'value': 'value', + 'type': 'type', + 'alias': 'alias', + 'args': 'args' + } + + def __init__(self, value=None, type=None, alias=None, args=None): # noqa: E501,D401,D403 + """Field - a model defined in OpenAPI.""" # noqa: E501 + self._value = None + self._type = None + self._alias = None + self._args = None + self.discriminator = None + + if value is not None: + self.value = value + if type is not None: + self.type = type + if alias is not None: + self.alias = alias + if args is not None: + self.args = args + + @property + def value(self): + """Get the value of this Field. + + value is the value of the field. Meaning of the value is implied by the `type` key + + :return: The value of this Field. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this Field. + + value is the value of the field. Meaning of the value is implied by the `type` key + + :param value: The value of this Field. 
+ :type: str + """ # noqa: E501 + self._value = value + + @property + def type(self): + """Get the type of this Field. + + `type` describes the field type. `func` is a function. `field` is a field reference. + + :return: The type of this Field. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Field. + + `type` describes the field type. `func` is a function. `field` is a field reference. + + :param type: The type of this Field. + :type: str + """ # noqa: E501 + self._type = type + + @property + def alias(self): + """Get the alias of this Field. + + Alias overrides the field name in the returned response. Applies only if type is `func` + + :return: The alias of this Field. + :rtype: str + """ # noqa: E501 + return self._alias + + @alias.setter + def alias(self, alias): + """Set the alias of this Field. + + Alias overrides the field name in the returned response. Applies only if type is `func` + + :param alias: The alias of this Field. + :type: str + """ # noqa: E501 + self._alias = alias + + @property + def args(self): + """Get the args of this Field. + + Args are the arguments to the function + + :return: The args of this Field. + :rtype: list[Field] + """ # noqa: E501 + return self._args + + @args.setter + def args(self, args): + """Set the args of this Field. + + Args are the arguments to the function + + :param args: The args of this Field. + :type: list[Field] + """ # noqa: E501 + self._args = args + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Field): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/file.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/file.py new file mode 100644 index 0000000..6d3ac81 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/file.py @@ -0,0 +1,215 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class File(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
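+
+    A minimal usage sketch (illustrative values; not part of the generated
+    file). All fields are optional and default to None:
+
+    >>> from influxdb_client.domain.file import File
+    >>> File(name="main.flux").to_dict()['name']
+    'main.flux'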
+ """ + openapi_types = { + 'type': 'str', + 'name': 'str', + 'package': 'PackageClause', + 'imports': 'list[ImportDeclaration]', + 'body': 'list[Statement]' + } + + attribute_map = { + 'type': 'type', + 'name': 'name', + 'package': 'package', + 'imports': 'imports', + 'body': 'body' + } + + def __init__(self, type=None, name=None, package=None, imports=None, body=None): # noqa: E501,D401,D403 + """File - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._name = None + self._package = None + self._imports = None + self._body = None + self.discriminator = None + + if type is not None: + self.type = type + if name is not None: + self.name = name + if package is not None: + self.package = package + if imports is not None: + self.imports = imports + if body is not None: + self.body = body + + @property + def type(self): + """Get the type of this File. + + Type of AST node + + :return: The type of this File. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this File. + + Type of AST node + + :param type: The type of this File. + :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this File. + + The name of the file. + + :return: The name of this File. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this File. + + The name of the file. + + :param name: The name of this File. + :type: str + """ # noqa: E501 + self._name = name + + @property + def package(self): + """Get the package of this File. + + :return: The package of this File. + :rtype: PackageClause + """ # noqa: E501 + return self._package + + @package.setter + def package(self, package): + """Set the package of this File. + + :param package: The package of this File. + :type: PackageClause + """ # noqa: E501 + self._package = package + + @property + def imports(self): + """Get the imports of this File. + + A list of package imports + + :return: The imports of this File. + :rtype: list[ImportDeclaration] + """ # noqa: E501 + return self._imports + + @imports.setter + def imports(self, imports): + """Set the imports of this File. + + A list of package imports + + :param imports: The imports of this File. + :type: list[ImportDeclaration] + """ # noqa: E501 + self._imports = imports + + @property + def body(self): + """Get the body of this File. + + List of Flux statements + + :return: The body of this File. + :rtype: list[Statement] + """ # noqa: E501 + return self._body + + @body.setter + def body(self, body): + """Set the body of this File. + + List of Flux statements + + :param body: The body of this File. 
+ :type: list[Statement] + """ # noqa: E501 + self._body = body + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, File): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/float_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/float_literal.py new file mode 100644 index 0000000..41e8ec3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/float_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class FloatLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'float' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """FloatLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this FloatLiteral. + + Type of AST node + + :return: The type of this FloatLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this FloatLiteral. + + Type of AST node + + :param type: The type of this FloatLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this FloatLiteral. + + :return: The value of this FloatLiteral. + :rtype: float + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this FloatLiteral. + + :param value: The value of this FloatLiteral. 
+ :type: float + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, FloatLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_response.py new file mode 100644 index 0000000..9460811 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_response.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class FluxResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'flux': 'str' + } + + attribute_map = { + 'flux': 'flux' + } + + def __init__(self, flux=None): # noqa: E501,D401,D403 + """FluxResponse - a model defined in OpenAPI.""" # noqa: E501 + self._flux = None + self.discriminator = None + + if flux is not None: + self.flux = flux + + @property + def flux(self): + """Get the flux of this FluxResponse. + + :return: The flux of this FluxResponse. + :rtype: str + """ # noqa: E501 + return self._flux + + @flux.setter + def flux(self, flux): + """Set the flux of this FluxResponse. + + :param flux: The flux of this FluxResponse. 
+ :type: str + """ # noqa: E501 + self._flux = flux + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, FluxResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestion.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestion.py new file mode 100644 index 0000000..46c692f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestion.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class FluxSuggestion(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'params': 'dict(str, str)' + } + + attribute_map = { + 'name': 'name', + 'params': 'params' + } + + def __init__(self, name=None, params=None): # noqa: E501,D401,D403 + """FluxSuggestion - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._params = None + self.discriminator = None + + if name is not None: + self.name = name + if params is not None: + self.params = params + + @property + def name(self): + """Get the name of this FluxSuggestion. + + :return: The name of this FluxSuggestion. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this FluxSuggestion. + + :param name: The name of this FluxSuggestion. + :type: str + """ # noqa: E501 + self._name = name + + @property + def params(self): + """Get the params of this FluxSuggestion. + + :return: The params of this FluxSuggestion. + :rtype: dict(str, str) + """ # noqa: E501 + return self._params + + @params.setter + def params(self, params): + """Set the params of this FluxSuggestion. + + :param params: The params of this FluxSuggestion. 
+ :type: dict(str, str) + """ # noqa: E501 + self._params = params + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, FluxSuggestion): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestions.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestions.py new file mode 100644 index 0000000..38c3ccf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/flux_suggestions.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class FluxSuggestions(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'funcs': 'list[FluxSuggestion]' + } + + attribute_map = { + 'funcs': 'funcs' + } + + def __init__(self, funcs=None): # noqa: E501,D401,D403 + """FluxSuggestions - a model defined in OpenAPI.""" # noqa: E501 + self._funcs = None + self.discriminator = None + + if funcs is not None: + self.funcs = funcs + + @property + def funcs(self): + """Get the funcs of this FluxSuggestions. + + :return: The funcs of this FluxSuggestions. + :rtype: list[FluxSuggestion] + """ # noqa: E501 + return self._funcs + + @funcs.setter + def funcs(self, funcs): + """Set the funcs of this FluxSuggestions. + + :param funcs: The funcs of this FluxSuggestions. 
+ :type: list[FluxSuggestion] + """ # noqa: E501 + self._funcs = funcs + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, FluxSuggestions): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/function_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/function_expression.py new file mode 100644 index 0000000..b06c738 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/function_expression.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class FunctionExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'params': 'list[ModelProperty]', + 'body': 'Node' + } + + attribute_map = { + 'type': 'type', + 'params': 'params', + 'body': 'body' + } + + def __init__(self, type=None, params=None, body=None): # noqa: E501,D401,D403 + """FunctionExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._params = None + self._body = None + self.discriminator = None + + if type is not None: + self.type = type + if params is not None: + self.params = params + if body is not None: + self.body = body + + @property + def type(self): + """Get the type of this FunctionExpression. + + Type of AST node + + :return: The type of this FunctionExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this FunctionExpression. + + Type of AST node + + :param type: The type of this FunctionExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def params(self): + """Get the params of this FunctionExpression. + + Function parameters + + :return: The params of this FunctionExpression. 
+ :rtype: list[ModelProperty] + """ # noqa: E501 + return self._params + + @params.setter + def params(self, params): + """Set the params of this FunctionExpression. + + Function parameters + + :param params: The params of this FunctionExpression. + :type: list[ModelProperty] + """ # noqa: E501 + self._params = params + + @property + def body(self): + """Get the body of this FunctionExpression. + + :return: The body of this FunctionExpression. + :rtype: Node + """ # noqa: E501 + return self._body + + @body.setter + def body(self, body): + """Set the body of this FunctionExpression. + + :param body: The body of this FunctionExpression. + :type: Node + """ # noqa: E501 + self._body = body + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, FunctionExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/gauge_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/gauge_view_properties.py new file mode 100644 index 0000000..2378598 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/gauge_view_properties.py @@ -0,0 +1,360 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class GaugeViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
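+
+    A minimal usage sketch (illustrative only; not part of the generated
+    file). Every field of this view model is required, so constructing it
+    without arguments raises on the first missing value:
+
+    >>> from influxdb_client.domain.gauge_view_properties import GaugeViewProperties
+    >>> GaugeViewProperties()
+    Traceback (most recent call last):
+        ...
+    ValueError: Invalid value for `type`, must not be `None`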
+ """ + openapi_types = { + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'prefix': 'str', + 'tick_prefix': 'str', + 'suffix': 'str', + 'tick_suffix': 'str', + 'decimal_places': 'DecimalPlaces' + } + + attribute_map = { + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'prefix': 'prefix', + 'tick_prefix': 'tickPrefix', + 'suffix': 'suffix', + 'tick_suffix': 'tickSuffix', + 'decimal_places': 'decimalPlaces' + } + + def __init__(self, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, prefix=None, tick_prefix=None, suffix=None, tick_suffix=None, decimal_places=None): # noqa: E501,D401,D403 + """GaugeViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._prefix = None + self._tick_prefix = None + self._suffix = None + self._tick_suffix = None + self._decimal_places = None + self.discriminator = None + + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.prefix = prefix + self.tick_prefix = tick_prefix + self.suffix = suffix + self.tick_suffix = tick_suffix + self.decimal_places = decimal_places + + @property + def type(self): + """Get the type of this GaugeViewProperties. + + :return: The type of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this GaugeViewProperties. + + :param type: The type of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this GaugeViewProperties. + + :return: The queries of this GaugeViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this GaugeViewProperties. + + :param queries: The queries of this GaugeViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this GaugeViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this GaugeViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this GaugeViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this GaugeViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this GaugeViewProperties. + + :return: The shape of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this GaugeViewProperties. 
+ + :param shape: The shape of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this GaugeViewProperties. + + :return: The note of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this GaugeViewProperties. + + :param note: The note of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this GaugeViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this GaugeViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this GaugeViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this GaugeViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def prefix(self): + """Get the prefix of this GaugeViewProperties. + + :return: The prefix of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._prefix + + @prefix.setter + def prefix(self, prefix): + """Set the prefix of this GaugeViewProperties. + + :param prefix: The prefix of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if prefix is None: + raise ValueError("Invalid value for `prefix`, must not be `None`") # noqa: E501 + self._prefix = prefix + + @property + def tick_prefix(self): + """Get the tick_prefix of this GaugeViewProperties. + + :return: The tick_prefix of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._tick_prefix + + @tick_prefix.setter + def tick_prefix(self, tick_prefix): + """Set the tick_prefix of this GaugeViewProperties. + + :param tick_prefix: The tick_prefix of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if tick_prefix is None: + raise ValueError("Invalid value for `tick_prefix`, must not be `None`") # noqa: E501 + self._tick_prefix = tick_prefix + + @property + def suffix(self): + """Get the suffix of this GaugeViewProperties. + + :return: The suffix of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._suffix + + @suffix.setter + def suffix(self, suffix): + """Set the suffix of this GaugeViewProperties. + + :param suffix: The suffix of this GaugeViewProperties. + :type: str + """ # noqa: E501 + if suffix is None: + raise ValueError("Invalid value for `suffix`, must not be `None`") # noqa: E501 + self._suffix = suffix + + @property + def tick_suffix(self): + """Get the tick_suffix of this GaugeViewProperties. + + :return: The tick_suffix of this GaugeViewProperties. + :rtype: str + """ # noqa: E501 + return self._tick_suffix + + @tick_suffix.setter + def tick_suffix(self, tick_suffix): + """Set the tick_suffix of this GaugeViewProperties. + + :param tick_suffix: The tick_suffix of this GaugeViewProperties. 
+ :type: str + """ # noqa: E501 + if tick_suffix is None: + raise ValueError("Invalid value for `tick_suffix`, must not be `None`") # noqa: E501 + self._tick_suffix = tick_suffix + + @property + def decimal_places(self): + """Get the decimal_places of this GaugeViewProperties. + + :return: The decimal_places of this GaugeViewProperties. + :rtype: DecimalPlaces + """ # noqa: E501 + return self._decimal_places + + @decimal_places.setter + def decimal_places(self, decimal_places): + """Set the decimal_places of this GaugeViewProperties. + + :param decimal_places: The decimal_places of this GaugeViewProperties. + :type: DecimalPlaces + """ # noqa: E501 + if decimal_places is None: + raise ValueError("Invalid value for `decimal_places`, must not be `None`") # noqa: E501 + self._decimal_places = decimal_places + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, GaugeViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/greater_threshold.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/greater_threshold.py new file mode 100644 index 0000000..b857f13 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/greater_threshold.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.threshold_base import ThresholdBase + + +class GreaterThreshold(ThresholdBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
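+
+    A minimal usage sketch (illustrative values; not part of the generated
+    file). `type` defaults to the discriminator value "greater", while
+    `value` is required:
+
+    >>> from influxdb_client.domain.greater_threshold import GreaterThreshold
+    >>> t = GreaterThreshold(value=10.0)
+    >>> (t.type, t.value)
+    ('greater', 10.0)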
+ """ + openapi_types = { + 'type': 'str', + 'value': 'float', + 'level': 'CheckStatusLevel', + 'all_values': 'bool' + } + + attribute_map = { + 'type': 'type', + 'value': 'value', + 'level': 'level', + 'all_values': 'allValues' + } + + def __init__(self, type="greater", value=None, level=None, all_values=None): # noqa: E501,D401,D403 + """GreaterThreshold - a model defined in OpenAPI.""" # noqa: E501 + ThresholdBase.__init__(self, level=level, all_values=all_values) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + self.type = type + self.value = value + + @property + def type(self): + """Get the type of this GreaterThreshold. + + :return: The type of this GreaterThreshold. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this GreaterThreshold. + + :param type: The type of this GreaterThreshold. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this GreaterThreshold. + + :return: The value of this GreaterThreshold. + :rtype: float + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this GreaterThreshold. + + :param value: The value of this GreaterThreshold. + :type: float + """ # noqa: E501 + if value is None: + raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, GreaterThreshold): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/health_check.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/health_check.py new file mode 100644 index 0000000..b355243 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/health_check.py @@ -0,0 +1,224 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class HealthCheck(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'message': 'str', + 'checks': 'list[HealthCheck]', + 'status': 'str', + 'version': 'str', + 'commit': 'str' + } + + attribute_map = { + 'name': 'name', + 'message': 'message', + 'checks': 'checks', + 'status': 'status', + 'version': 'version', + 'commit': 'commit' + } + + def __init__(self, name=None, message=None, checks=None, status=None, version=None, commit=None): # noqa: E501,D401,D403 + """HealthCheck - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._message = None + self._checks = None + self._status = None + self._version = None + self._commit = None + self.discriminator = None + + self.name = name + if message is not None: + self.message = message + if checks is not None: + self.checks = checks + self.status = status + if version is not None: + self.version = version + if commit is not None: + self.commit = commit + + @property + def name(self): + """Get the name of this HealthCheck. + + :return: The name of this HealthCheck. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this HealthCheck. + + :param name: The name of this HealthCheck. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def message(self): + """Get the message of this HealthCheck. + + :return: The message of this HealthCheck. + :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this HealthCheck. + + :param message: The message of this HealthCheck. + :type: str + """ # noqa: E501 + self._message = message + + @property + def checks(self): + """Get the checks of this HealthCheck. + + :return: The checks of this HealthCheck. + :rtype: list[HealthCheck] + """ # noqa: E501 + return self._checks + + @checks.setter + def checks(self, checks): + """Set the checks of this HealthCheck. + + :param checks: The checks of this HealthCheck. + :type: list[HealthCheck] + """ # noqa: E501 + self._checks = checks + + @property + def status(self): + """Get the status of this HealthCheck. + + :return: The status of this HealthCheck. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this HealthCheck. + + :param status: The status of this HealthCheck. + :type: str + """ # noqa: E501 + if status is None: + raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501 + self._status = status + + @property + def version(self): + """Get the version of this HealthCheck. + + :return: The version of this HealthCheck. + :rtype: str + """ # noqa: E501 + return self._version + + @version.setter + def version(self, version): + """Set the version of this HealthCheck. + + :param version: The version of this HealthCheck. + :type: str + """ # noqa: E501 + self._version = version + + @property + def commit(self): + """Get the commit of this HealthCheck. + + :return: The commit of this HealthCheck. + :rtype: str + """ # noqa: E501 + return self._commit + + @commit.setter + def commit(self, commit): + """Set the commit of this HealthCheck. + + :param commit: The commit of this HealthCheck. 
+ :type: str + """ # noqa: E501 + self._commit = commit + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HealthCheck): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/heatmap_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/heatmap_view_properties.py new file mode 100644 index 0000000..c4061b1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/heatmap_view_properties.py @@ -0,0 +1,826 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class HeatmapViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
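+
+    Example:
+        An illustrative sketch added by the editor (not generator output).
+        Every argument whose setter rejects `None` is supplied explicitly;
+        the concrete values are arbitrary placeholders::
+
+            from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties
+
+            props = HeatmapViewProperties(
+                type="heatmap", queries=[], colors=["#000004", "#fcffa4"],
+                shape="chronograf-v2", note="", show_note_when_empty=False,
+                x_column="_time", y_column="_value",
+                x_domain=[0.0, 100.0], y_domain=[0.0, 100.0],
+                x_axis_label="time", y_axis_label="value",
+                x_prefix="", x_suffix="", y_prefix="", y_suffix="",
+                bin_size=10.0)
+            print(props.to_dict())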
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[str]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_column': 'str', + 'generate_y_axis_ticks': 'list[str]', + 'y_total_ticks': 'int', + 'y_tick_start': 'float', + 'y_tick_step': 'float', + 'x_domain': 'list[float]', + 'y_domain': 'list[float]', + 'x_axis_label': 'str', + 'y_axis_label': 'str', + 'x_prefix': 'str', + 'x_suffix': 'str', + 'y_prefix': 'str', + 'y_suffix': 'str', + 'bin_size': 'float', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_column': 'yColumn', + 'generate_y_axis_ticks': 'generateYAxisTicks', + 'y_total_ticks': 'yTotalTicks', + 'y_tick_start': 'yTickStart', + 'y_tick_step': 'yTickStep', + 'x_domain': 'xDomain', + 'y_domain': 'yDomain', + 'x_axis_label': 'xAxisLabel', + 'y_axis_label': 'yAxisLabel', + 'x_prefix': 'xPrefix', + 'x_suffix': 'xSuffix', + 'y_prefix': 'yPrefix', + 'y_suffix': 'ySuffix', + 'bin_size': 'binSize', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, time_format=None, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_column=None, generate_y_axis_ticks=None, y_total_ticks=None, y_tick_start=None, y_tick_step=None, x_domain=None, y_domain=None, x_axis_label=None, y_axis_label=None, x_prefix=None, x_suffix=None, y_prefix=None, y_suffix=None, bin_size=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """HeatmapViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_column = None + self._generate_y_axis_ticks = None + self._y_total_ticks = None + self._y_tick_start = None + self._y_tick_step = None + self._x_domain = None + self._y_domain = None + self._x_axis_label = None + self._y_axis_label = None + self._x_prefix = None + self._x_suffix = None + self._y_prefix = None + self._y_suffix = None + self._bin_size = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide 
= adaptive_zoom_hide + if time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + self.y_column = y_column + if generate_y_axis_ticks is not None: + self.generate_y_axis_ticks = generate_y_axis_ticks + if y_total_ticks is not None: + self.y_total_ticks = y_total_ticks + if y_tick_start is not None: + self.y_tick_start = y_tick_start + if y_tick_step is not None: + self.y_tick_step = y_tick_step + self.x_domain = x_domain + self.y_domain = y_domain + self.x_axis_label = x_axis_label + self.y_axis_label = y_axis_label + self.x_prefix = x_prefix + self.x_suffix = x_suffix + self.y_prefix = y_prefix + self.y_suffix = y_suffix + self.bin_size = bin_size + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this HeatmapViewProperties. + + :return: The adaptive_zoom_hide of this HeatmapViewProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this HeatmapViewProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this HeatmapViewProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def time_format(self): + """Get the time_format of this HeatmapViewProperties. + + :return: The time_format of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this HeatmapViewProperties. + + :param time_format: The time_format of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this HeatmapViewProperties. + + :return: The type of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this HeatmapViewProperties. + + :param type: The type of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this HeatmapViewProperties. + + :return: The queries of this HeatmapViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this HeatmapViewProperties. + + :param queries: The queries of this HeatmapViewProperties. 
+ :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this HeatmapViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this HeatmapViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this HeatmapViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this HeatmapViewProperties. + :type: list[str] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this HeatmapViewProperties. + + :return: The shape of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this HeatmapViewProperties. + + :param shape: The shape of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this HeatmapViewProperties. + + :return: The note of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this HeatmapViewProperties. + + :param note: The note of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this HeatmapViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this HeatmapViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this HeatmapViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this HeatmapViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def x_column(self): + """Get the x_column of this HeatmapViewProperties. + + :return: The x_column of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this HeatmapViewProperties. + + :param x_column: The x_column of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if x_column is None: + raise ValueError("Invalid value for `x_column`, must not be `None`") # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this HeatmapViewProperties. + + :return: The generate_x_axis_ticks of this HeatmapViewProperties. 
+ :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this HeatmapViewProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this HeatmapViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this HeatmapViewProperties. + + :return: The x_total_ticks of this HeatmapViewProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this HeatmapViewProperties. + + :param x_total_ticks: The x_total_ticks of this HeatmapViewProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this HeatmapViewProperties. + + :return: The x_tick_start of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this HeatmapViewProperties. + + :param x_tick_start: The x_tick_start of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this HeatmapViewProperties. + + :return: The x_tick_step of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this HeatmapViewProperties. + + :param x_tick_step: The x_tick_step of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_column(self): + """Get the y_column of this HeatmapViewProperties. + + :return: The y_column of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_column + + @y_column.setter + def y_column(self, y_column): + """Set the y_column of this HeatmapViewProperties. + + :param y_column: The y_column of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if y_column is None: + raise ValueError("Invalid value for `y_column`, must not be `None`") # noqa: E501 + self._y_column = y_column + + @property + def generate_y_axis_ticks(self): + """Get the generate_y_axis_ticks of this HeatmapViewProperties. + + :return: The generate_y_axis_ticks of this HeatmapViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_y_axis_ticks + + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks): + """Set the generate_y_axis_ticks of this HeatmapViewProperties. + + :param generate_y_axis_ticks: The generate_y_axis_ticks of this HeatmapViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_y_axis_ticks = generate_y_axis_ticks + + @property + def y_total_ticks(self): + """Get the y_total_ticks of this HeatmapViewProperties. + + :return: The y_total_ticks of this HeatmapViewProperties. + :rtype: int + """ # noqa: E501 + return self._y_total_ticks + + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks): + """Set the y_total_ticks of this HeatmapViewProperties. + + :param y_total_ticks: The y_total_ticks of this HeatmapViewProperties. 
+ :type: int + """ # noqa: E501 + self._y_total_ticks = y_total_ticks + + @property + def y_tick_start(self): + """Get the y_tick_start of this HeatmapViewProperties. + + :return: The y_tick_start of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_start + + @y_tick_start.setter + def y_tick_start(self, y_tick_start): + """Set the y_tick_start of this HeatmapViewProperties. + + :param y_tick_start: The y_tick_start of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_start = y_tick_start + + @property + def y_tick_step(self): + """Get the y_tick_step of this HeatmapViewProperties. + + :return: The y_tick_step of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_step + + @y_tick_step.setter + def y_tick_step(self, y_tick_step): + """Set the y_tick_step of this HeatmapViewProperties. + + :param y_tick_step: The y_tick_step of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_step = y_tick_step + + @property + def x_domain(self): + """Get the x_domain of this HeatmapViewProperties. + + :return: The x_domain of this HeatmapViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._x_domain + + @x_domain.setter + def x_domain(self, x_domain): + """Set the x_domain of this HeatmapViewProperties. + + :param x_domain: The x_domain of this HeatmapViewProperties. + :type: list[float] + """ # noqa: E501 + if x_domain is None: + raise ValueError("Invalid value for `x_domain`, must not be `None`") # noqa: E501 + self._x_domain = x_domain + + @property + def y_domain(self): + """Get the y_domain of this HeatmapViewProperties. + + :return: The y_domain of this HeatmapViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._y_domain + + @y_domain.setter + def y_domain(self, y_domain): + """Set the y_domain of this HeatmapViewProperties. + + :param y_domain: The y_domain of this HeatmapViewProperties. + :type: list[float] + """ # noqa: E501 + if y_domain is None: + raise ValueError("Invalid value for `y_domain`, must not be `None`") # noqa: E501 + self._y_domain = y_domain + + @property + def x_axis_label(self): + """Get the x_axis_label of this HeatmapViewProperties. + + :return: The x_axis_label of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_axis_label + + @x_axis_label.setter + def x_axis_label(self, x_axis_label): + """Set the x_axis_label of this HeatmapViewProperties. + + :param x_axis_label: The x_axis_label of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if x_axis_label is None: + raise ValueError("Invalid value for `x_axis_label`, must not be `None`") # noqa: E501 + self._x_axis_label = x_axis_label + + @property + def y_axis_label(self): + """Get the y_axis_label of this HeatmapViewProperties. + + :return: The y_axis_label of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_axis_label + + @y_axis_label.setter + def y_axis_label(self, y_axis_label): + """Set the y_axis_label of this HeatmapViewProperties. + + :param y_axis_label: The y_axis_label of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if y_axis_label is None: + raise ValueError("Invalid value for `y_axis_label`, must not be `None`") # noqa: E501 + self._y_axis_label = y_axis_label + + @property + def x_prefix(self): + """Get the x_prefix of this HeatmapViewProperties. + + :return: The x_prefix of this HeatmapViewProperties. 
+ :rtype: str + """ # noqa: E501 + return self._x_prefix + + @x_prefix.setter + def x_prefix(self, x_prefix): + """Set the x_prefix of this HeatmapViewProperties. + + :param x_prefix: The x_prefix of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if x_prefix is None: + raise ValueError("Invalid value for `x_prefix`, must not be `None`") # noqa: E501 + self._x_prefix = x_prefix + + @property + def x_suffix(self): + """Get the x_suffix of this HeatmapViewProperties. + + :return: The x_suffix of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_suffix + + @x_suffix.setter + def x_suffix(self, x_suffix): + """Set the x_suffix of this HeatmapViewProperties. + + :param x_suffix: The x_suffix of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if x_suffix is None: + raise ValueError("Invalid value for `x_suffix`, must not be `None`") # noqa: E501 + self._x_suffix = x_suffix + + @property + def y_prefix(self): + """Get the y_prefix of this HeatmapViewProperties. + + :return: The y_prefix of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_prefix + + @y_prefix.setter + def y_prefix(self, y_prefix): + """Set the y_prefix of this HeatmapViewProperties. + + :param y_prefix: The y_prefix of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if y_prefix is None: + raise ValueError("Invalid value for `y_prefix`, must not be `None`") # noqa: E501 + self._y_prefix = y_prefix + + @property + def y_suffix(self): + """Get the y_suffix of this HeatmapViewProperties. + + :return: The y_suffix of this HeatmapViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_suffix + + @y_suffix.setter + def y_suffix(self, y_suffix): + """Set the y_suffix of this HeatmapViewProperties. + + :param y_suffix: The y_suffix of this HeatmapViewProperties. + :type: str + """ # noqa: E501 + if y_suffix is None: + raise ValueError("Invalid value for `y_suffix`, must not be `None`") # noqa: E501 + self._y_suffix = y_suffix + + @property + def bin_size(self): + """Get the bin_size of this HeatmapViewProperties. + + :return: The bin_size of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._bin_size + + @bin_size.setter + def bin_size(self, bin_size): + """Set the bin_size of this HeatmapViewProperties. + + :param bin_size: The bin_size of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + if bin_size is None: + raise ValueError("Invalid value for `bin_size`, must not be `None`") # noqa: E501 + self._bin_size = bin_size + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this HeatmapViewProperties. + + :return: The legend_colorize_rows of this HeatmapViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this HeatmapViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this HeatmapViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this HeatmapViewProperties. + + :return: The legend_hide of this HeatmapViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this HeatmapViewProperties. + + :param legend_hide: The legend_hide of this HeatmapViewProperties. 
+ :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this HeatmapViewProperties. + + :return: The legend_opacity of this HeatmapViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this HeatmapViewProperties. + + :param legend_opacity: The legend_opacity of this HeatmapViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this HeatmapViewProperties. + + :return: The legend_orientation_threshold of this HeatmapViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this HeatmapViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this HeatmapViewProperties. + :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HeatmapViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/histogram_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/histogram_view_properties.py new file mode 100644 index 0000000..7505743 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/histogram_view_properties.py @@ -0,0 +1,476 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class HistogramViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
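+
+    Example:
+        An illustrative sketch added by the editor (not generator output).
+        All arguments whose setters reject `None` are supplied; the values
+        are arbitrary placeholders::
+
+            from influxdb_client.domain.histogram_view_properties import HistogramViewProperties
+
+            props = HistogramViewProperties(
+                type="histogram", queries=[], colors=[],
+                shape="chronograf-v2", note="", show_note_when_empty=False,
+                x_column="_value", fill_columns=[], x_domain=[0.0, 100.0],
+                x_axis_label="value", position="stacked", bin_count=30)
+            print(props.to_dict())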
+ """ + openapi_types = { + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'x_column': 'str', + 'fill_columns': 'list[str]', + 'x_domain': 'list[float]', + 'x_axis_label': 'str', + 'position': 'str', + 'bin_count': 'int', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'x_column': 'xColumn', + 'fill_columns': 'fillColumns', + 'x_domain': 'xDomain', + 'x_axis_label': 'xAxisLabel', + 'position': 'position', + 'bin_count': 'binCount', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, x_column=None, fill_columns=None, x_domain=None, x_axis_label=None, position=None, bin_count=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """HistogramViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._x_column = None + self._fill_columns = None + self._x_domain = None + self._x_axis_label = None + self._position = None + self._bin_count = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.x_column = x_column + self.fill_columns = fill_columns + self.x_domain = x_domain + self.x_axis_label = x_axis_label + self.position = position + self.bin_count = bin_count + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def type(self): + """Get the type of this HistogramViewProperties. + + :return: The type of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this HistogramViewProperties. + + :param type: The type of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this HistogramViewProperties. + + :return: The queries of this HistogramViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this HistogramViewProperties. + + :param queries: The queries of this HistogramViewProperties. 
+ :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this HistogramViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this HistogramViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this HistogramViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this HistogramViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this HistogramViewProperties. + + :return: The shape of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this HistogramViewProperties. + + :param shape: The shape of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this HistogramViewProperties. + + :return: The note of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this HistogramViewProperties. + + :param note: The note of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this HistogramViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this HistogramViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this HistogramViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this HistogramViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def x_column(self): + """Get the x_column of this HistogramViewProperties. + + :return: The x_column of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this HistogramViewProperties. + + :param x_column: The x_column of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if x_column is None: + raise ValueError("Invalid value for `x_column`, must not be `None`") # noqa: E501 + self._x_column = x_column + + @property + def fill_columns(self): + """Get the fill_columns of this HistogramViewProperties. + + :return: The fill_columns of this HistogramViewProperties. 
+ :rtype: list[str] + """ # noqa: E501 + return self._fill_columns + + @fill_columns.setter + def fill_columns(self, fill_columns): + """Set the fill_columns of this HistogramViewProperties. + + :param fill_columns: The fill_columns of this HistogramViewProperties. + :type: list[str] + """ # noqa: E501 + if fill_columns is None: + raise ValueError("Invalid value for `fill_columns`, must not be `None`") # noqa: E501 + self._fill_columns = fill_columns + + @property + def x_domain(self): + """Get the x_domain of this HistogramViewProperties. + + :return: The x_domain of this HistogramViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._x_domain + + @x_domain.setter + def x_domain(self, x_domain): + """Set the x_domain of this HistogramViewProperties. + + :param x_domain: The x_domain of this HistogramViewProperties. + :type: list[float] + """ # noqa: E501 + if x_domain is None: + raise ValueError("Invalid value for `x_domain`, must not be `None`") # noqa: E501 + self._x_domain = x_domain + + @property + def x_axis_label(self): + """Get the x_axis_label of this HistogramViewProperties. + + :return: The x_axis_label of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_axis_label + + @x_axis_label.setter + def x_axis_label(self, x_axis_label): + """Set the x_axis_label of this HistogramViewProperties. + + :param x_axis_label: The x_axis_label of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if x_axis_label is None: + raise ValueError("Invalid value for `x_axis_label`, must not be `None`") # noqa: E501 + self._x_axis_label = x_axis_label + + @property + def position(self): + """Get the position of this HistogramViewProperties. + + :return: The position of this HistogramViewProperties. + :rtype: str + """ # noqa: E501 + return self._position + + @position.setter + def position(self, position): + """Set the position of this HistogramViewProperties. + + :param position: The position of this HistogramViewProperties. + :type: str + """ # noqa: E501 + if position is None: + raise ValueError("Invalid value for `position`, must not be `None`") # noqa: E501 + self._position = position + + @property + def bin_count(self): + """Get the bin_count of this HistogramViewProperties. + + :return: The bin_count of this HistogramViewProperties. + :rtype: int + """ # noqa: E501 + return self._bin_count + + @bin_count.setter + def bin_count(self, bin_count): + """Set the bin_count of this HistogramViewProperties. + + :param bin_count: The bin_count of this HistogramViewProperties. + :type: int + """ # noqa: E501 + if bin_count is None: + raise ValueError("Invalid value for `bin_count`, must not be `None`") # noqa: E501 + self._bin_count = bin_count + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this HistogramViewProperties. + + :return: The legend_colorize_rows of this HistogramViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this HistogramViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this HistogramViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this HistogramViewProperties. + + :return: The legend_hide of this HistogramViewProperties. 
+ :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this HistogramViewProperties. + + :param legend_hide: The legend_hide of this HistogramViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this HistogramViewProperties. + + :return: The legend_opacity of this HistogramViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this HistogramViewProperties. + + :param legend_opacity: The legend_opacity of this HistogramViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this HistogramViewProperties. + + :return: The legend_orientation_threshold of this HistogramViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this HistogramViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this HistogramViewProperties. + :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HistogramViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_endpoint.py new file mode 100644 index 0000000..d8a4885 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_endpoint.py @@ -0,0 +1,301 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + + +class HTTPNotificationEndpoint(NotificationEndpointDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'url': 'str', + 'username': 'str', + 'password': 'str', + 'token': 'str', + 'method': 'str', + 'auth_method': 'str', + 'content_template': 'str', + 'headers': 'dict(str, str)', + 'id': 'str', + 'org_id': 'str', + 'user_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'description': 'str', + 'name': 'str', + 'status': 'str', + 'labels': 'list[Label]', + 'links': 'NotificationEndpointBaseLinks', + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'url': 'url', + 'username': 'username', + 'password': 'password', + 'token': 'token', + 'method': 'method', + 'auth_method': 'authMethod', + 'content_template': 'contentTemplate', + 'headers': 'headers', + 'id': 'id', + 'org_id': 'orgID', + 'user_id': 'userID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'description': 'description', + 'name': 'name', + 'status': 'status', + 'labels': 'labels', + 'links': 'links', + 'type': 'type' + } + + def __init__(self, url=None, username=None, password=None, token=None, method=None, auth_method=None, content_template=None, headers=None, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type="http"): # noqa: E501,D401,D403 + """HTTPNotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501 + NotificationEndpointDiscriminator.__init__(self, id=id, org_id=org_id, user_id=user_id, created_at=created_at, updated_at=updated_at, description=description, name=name, status=status, labels=labels, links=links, type=type) # noqa: E501 + + self._url = None + self._username = None + self._password = None + self._token = None + self._method = None + self._auth_method = None + self._content_template = None + self._headers = None + self.discriminator = None + + self.url = url + if username is not None: + self.username = username + if password is not None: + self.password = password + if token is not None: + self.token = token + self.method = method + self.auth_method = auth_method + if content_template is not None: + self.content_template = content_template + if headers is not None: + self.headers = headers + + @property + def url(self): + """Get the url of this HTTPNotificationEndpoint. + + :return: The url of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this HTTPNotificationEndpoint. + + :param url: The url of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + if url is None: + raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + self._url = url + + @property + def username(self): + """Get the username of this HTTPNotificationEndpoint. + + :return: The username of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._username + + @username.setter + def username(self, username): + """Set the username of this HTTPNotificationEndpoint. + + :param username: The username of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + self._username = username + + @property + def password(self): + """Get the password of this HTTPNotificationEndpoint. + + :return: The password of this HTTPNotificationEndpoint. 
+ :rtype: str + """ # noqa: E501 + return self._password + + @password.setter + def password(self, password): + """Set the password of this HTTPNotificationEndpoint. + + :param password: The password of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + self._password = password + + @property + def token(self): + """Get the token of this HTTPNotificationEndpoint. + + :return: The token of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this HTTPNotificationEndpoint. + + :param token: The token of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + self._token = token + + @property + def method(self): + """Get the method of this HTTPNotificationEndpoint. + + :return: The method of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._method + + @method.setter + def method(self, method): + """Set the method of this HTTPNotificationEndpoint. + + :param method: The method of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + if method is None: + raise ValueError("Invalid value for `method`, must not be `None`") # noqa: E501 + self._method = method + + @property + def auth_method(self): + """Get the auth_method of this HTTPNotificationEndpoint. + + :return: The auth_method of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._auth_method + + @auth_method.setter + def auth_method(self, auth_method): + """Set the auth_method of this HTTPNotificationEndpoint. + + :param auth_method: The auth_method of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + if auth_method is None: + raise ValueError("Invalid value for `auth_method`, must not be `None`") # noqa: E501 + self._auth_method = auth_method + + @property + def content_template(self): + """Get the content_template of this HTTPNotificationEndpoint. + + :return: The content_template of this HTTPNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._content_template + + @content_template.setter + def content_template(self, content_template): + """Set the content_template of this HTTPNotificationEndpoint. + + :param content_template: The content_template of this HTTPNotificationEndpoint. + :type: str + """ # noqa: E501 + self._content_template = content_template + + @property + def headers(self): + """Get the headers of this HTTPNotificationEndpoint. + + Customized headers. + + :return: The headers of this HTTPNotificationEndpoint. + :rtype: dict(str, str) + """ # noqa: E501 + return self._headers + + @headers.setter + def headers(self, headers): + """Set the headers of this HTTPNotificationEndpoint. + + Customized headers. + + :param headers: The headers of this HTTPNotificationEndpoint. 
+ :type: dict(str, str) + """ # noqa: E501 + self._headers = headers + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HTTPNotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule.py new file mode 100644 index 0000000..050b3fc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase + + +class HTTPNotificationRule(HTTPNotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
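+
+    Example:
+        An illustrative sketch added by the editor (not generator output).
+        Which keyword arguments are mandatory is enforced by the
+        NotificationRuleBase ancestors defined outside this file; the IDs
+        and interval below are placeholders::
+
+            from influxdb_client.domain.http_notification_rule import HTTPNotificationRule
+
+            rule = HTTPNotificationRule(
+                name="cpu-alert", status="active", every="10m",
+                endpoint_id="0000000000000001", org_id="0000000000000002",
+                status_rules=[])
+            print(rule.to_dict())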
+ """ + openapi_types = { + 'type': 'str', + 'url': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'url': 'url', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="http", url=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """HTTPNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + HTTPNotificationRuleBase.__init__(self, type=type, url=url, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HTTPNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule_base.py new file mode 100644 index 0000000..521f908 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/http_notification_rule_base.py @@ -0,0 +1,181 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + + +class HTTPNotificationRuleBase(NotificationRuleDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'url': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'url': 'url', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type=None, url=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """HTTPNotificationRuleBase - a model defined in OpenAPI.""" # noqa: E501 + NotificationRuleDiscriminator.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # 
noqa: E501 + + self._type = None + self._url = None + self.discriminator = None + + self.type = type + if url is not None: + self.url = url + + @property + def type(self): + """Get the type of this HTTPNotificationRuleBase. + + :return: The type of this HTTPNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this HTTPNotificationRuleBase. + + :param type: The type of this HTTPNotificationRuleBase. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def url(self): + """Get the url of this HTTPNotificationRuleBase. + + :return: The url of this HTTPNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this HTTPNotificationRuleBase. + + :param url: The url of this HTTPNotificationRuleBase. + :type: str + """ # noqa: E501 + self._url = url + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, HTTPNotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/identifier.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/identifier.py new file mode 100644 index 0000000..aa9a3fb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/identifier.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.property_key import PropertyKey + + +class Identifier(PropertyKey): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'name': 'str' + } + + attribute_map = { + 'type': 'type', + 'name': 'name' + } + + def __init__(self, type=None, name=None): # noqa: E501,D401,D403 + """Identifier - a model defined in OpenAPI.""" # noqa: E501 + PropertyKey.__init__(self) # noqa: E501 + + self._type = None + self._name = None + self.discriminator = None + + if type is not None: + self.type = type + if name is not None: + self.name = name + + @property + def type(self): + """Get the type of this Identifier. + + Type of AST node + + :return: The type of this Identifier. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Identifier. + + Type of AST node + + :param type: The type of this Identifier. + :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this Identifier. + + :return: The name of this Identifier. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Identifier. + + :param name: The name of this Identifier. + :type: str + """ # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Identifier): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/import_declaration.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/import_declaration.py new file mode 100644 index 0000000..435380c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/import_declaration.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ImportDeclaration(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + '_as': 'Identifier', + 'path': 'StringLiteral' + } + + attribute_map = { + 'type': 'type', + '_as': 'as', + 'path': 'path' + } + + def __init__(self, type=None, _as=None, path=None): # noqa: E501,D401,D403 + """ImportDeclaration - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.__as = None + self._path = None + self.discriminator = None + + if type is not None: + self.type = type + if _as is not None: + self._as = _as + if path is not None: + self.path = path + + @property + def type(self): + """Get the type of this ImportDeclaration. + + Type of AST node + + :return: The type of this ImportDeclaration. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ImportDeclaration. + + Type of AST node + + :param type: The type of this ImportDeclaration. + :type: str + """ # noqa: E501 + self._type = type + + @property + def _as(self): + """Get the _as of this ImportDeclaration. + + :return: The _as of this ImportDeclaration. + :rtype: Identifier + """ # noqa: E501 + return self.__as + + @_as.setter + def _as(self, _as): + """Set the _as of this ImportDeclaration. + + :param _as: The _as of this ImportDeclaration. + :type: Identifier + """ # noqa: E501 + self.__as = _as + + @property + def path(self): + """Get the path of this ImportDeclaration. + + :return: The path of this ImportDeclaration. + :rtype: StringLiteral + """ # noqa: E501 + return self._path + + @path.setter + def path(self, path): + """Set the path of this ImportDeclaration. + + :param path: The path of this ImportDeclaration. + :type: StringLiteral + """ # noqa: E501 + self._path = path + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ImportDeclaration): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/index_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/index_expression.py new file mode 100644 index 0000000..4aedd73 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/index_expression.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class IndexExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'array': 'Expression', + 'index': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'array': 'array', + 'index': 'index' + } + + def __init__(self, type=None, array=None, index=None): # noqa: E501,D401,D403 + """IndexExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._array = None + self._index = None + self.discriminator = None + + if type is not None: + self.type = type + if array is not None: + self.array = array + if index is not None: + self.index = index + + @property + def type(self): + """Get the type of this IndexExpression. + + Type of AST node + + :return: The type of this IndexExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this IndexExpression. + + Type of AST node + + :param type: The type of this IndexExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def array(self): + """Get the array of this IndexExpression. + + :return: The array of this IndexExpression. + :rtype: Expression + """ # noqa: E501 + return self._array + + @array.setter + def array(self, array): + """Set the array of this IndexExpression. + + :param array: The array of this IndexExpression. + :type: Expression + """ # noqa: E501 + self._array = array + + @property + def index(self): + """Get the index of this IndexExpression. + + :return: The index of this IndexExpression. + :rtype: Expression + """ # noqa: E501 + return self._index + + @index.setter + def index(self, index): + """Set the index of this IndexExpression. + + :param index: The index of this IndexExpression. 
+ :type: Expression + """ # noqa: E501 + self._index = index + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, IndexExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/integer_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/integer_literal.py new file mode 100644 index 0000000..dcedf78 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/integer_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class IntegerLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'str' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """IntegerLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this IntegerLiteral. + + Type of AST node + + :return: The type of this IntegerLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this IntegerLiteral. + + Type of AST node + + :param type: The type of this IntegerLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this IntegerLiteral. + + :return: The value of this IntegerLiteral. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this IntegerLiteral. + + :param value: The value of this IntegerLiteral. 
+ :type: str + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, IntegerLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/is_onboarding.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/is_onboarding.py new file mode 100644 index 0000000..0e14abd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/is_onboarding.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class IsOnboarding(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'allowed': 'bool' + } + + attribute_map = { + 'allowed': 'allowed' + } + + def __init__(self, allowed=None): # noqa: E501,D401,D403 + """IsOnboarding - a model defined in OpenAPI.""" # noqa: E501 + self._allowed = None + self.discriminator = None + + if allowed is not None: + self.allowed = allowed + + @property + def allowed(self): + """Get the allowed of this IsOnboarding. + + If `true`, the InfluxDB instance hasn't had initial setup; `false` otherwise. + + :return: The allowed of this IsOnboarding. + :rtype: bool + """ # noqa: E501 + return self._allowed + + @allowed.setter + def allowed(self, allowed): + """Set the allowed of this IsOnboarding. + + If `true`, the InfluxDB instance hasn't had initial setup; `false` otherwise. + + :param allowed: The allowed of this IsOnboarding. 
+ :type: bool + """ # noqa: E501 + self._allowed = allowed + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, IsOnboarding): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/label.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label.py new file mode 100644 index 0000000..cceaab9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label.py @@ -0,0 +1,180 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Label(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'name': 'str', + 'properties': 'dict(str, str)' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'name': 'name', + 'properties': 'properties' + } + + def __init__(self, id=None, org_id=None, name=None, properties=None): # noqa: E501,D401,D403 + """Label - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._name = None + self._properties = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if name is not None: + self.name = name + if properties is not None: + self.properties = properties + + @property + def id(self): + """Get the id of this Label. + + :return: The id of this Label. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Label. + + :param id: The id of this Label. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this Label. + + :return: The org_id of this Label. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Label. + + :param org_id: The org_id of this Label. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this Label. + + :return: The name of this Label. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Label. + + :param name: The name of this Label. + :type: str + """ # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this Label. + + Key-value pairs associated with this label. To remove a property, send an update with an empty value (`""`) for the key. + + :return: The properties of this Label. + :rtype: dict(str, str) + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this Label. + + Key-value pairs associated with this label. To remove a property, send an update with an empty value (`""`) for the key. + + :param properties: The properties of this Label. + :type: dict(str, str) + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Label): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_create_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_create_request.py new file mode 100644 index 0000000..73259ee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_create_request.py @@ -0,0 +1,159 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LabelCreateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'org_id': 'str', + 'name': 'str', + 'properties': 'dict(str, str)' + } + + attribute_map = { + 'org_id': 'orgID', + 'name': 'name', + 'properties': 'properties' + } + + def __init__(self, org_id=None, name=None, properties=None): # noqa: E501,D401,D403 + """LabelCreateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._name = None + self._properties = None + self.discriminator = None + + self.org_id = org_id + self.name = name + if properties is not None: + self.properties = properties + + @property + def org_id(self): + """Get the org_id of this LabelCreateRequest. + + :return: The org_id of this LabelCreateRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this LabelCreateRequest. + + :param org_id: The org_id of this LabelCreateRequest. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this LabelCreateRequest. + + :return: The name of this LabelCreateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this LabelCreateRequest. + + :param name: The name of this LabelCreateRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this LabelCreateRequest. + + Key-value pairs associated with this label. To remove a property, send an update with an empty value (`""`) for the key. + + :return: The properties of this LabelCreateRequest. + :rtype: dict(str, str) + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this LabelCreateRequest. + + Key-value pairs associated with this label. To remove a property, send an update with an empty value (`""`) for the key. + + :param properties: The properties of this LabelCreateRequest. 
+ :type: dict(str, str) + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LabelCreateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_mapping.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_mapping.py new file mode 100644 index 0000000..909d1b3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_mapping.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LabelMapping(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'label_id': 'str' + } + + attribute_map = { + 'label_id': 'labelID' + } + + def __init__(self, label_id=None): # noqa: E501,D401,D403 + """LabelMapping - a model defined in OpenAPI.""" # noqa: E501 + self._label_id = None + self.discriminator = None + + self.label_id = label_id + + @property + def label_id(self): + """Get the label_id of this LabelMapping. + + A label ID. Specifies the label to attach. + + :return: The label_id of this LabelMapping. + :rtype: str + """ # noqa: E501 + return self._label_id + + @label_id.setter + def label_id(self, label_id): + """Set the label_id of this LabelMapping. + + A label ID. Specifies the label to attach. + + :param label_id: The label_id of this LabelMapping. 
+ :type: str + """ # noqa: E501 + if label_id is None: + raise ValueError("Invalid value for `label_id`, must not be `None`") # noqa: E501 + self._label_id = label_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LabelMapping): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_response.py new file mode 100644 index 0000000..8eed840 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_response.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LabelResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'label': 'Label', + 'links': 'Links' + } + + attribute_map = { + 'label': 'label', + 'links': 'links' + } + + def __init__(self, label=None, links=None): # noqa: E501,D401,D403 + """LabelResponse - a model defined in OpenAPI.""" # noqa: E501 + self._label = None + self._links = None + self.discriminator = None + + if label is not None: + self.label = label + if links is not None: + self.links = links + + @property + def label(self): + """Get the label of this LabelResponse. + + :return: The label of this LabelResponse. + :rtype: Label + """ # noqa: E501 + return self._label + + @label.setter + def label(self, label): + """Set the label of this LabelResponse. + + :param label: The label of this LabelResponse. + :type: Label + """ # noqa: E501 + self._label = label + + @property + def links(self): + """Get the links of this LabelResponse. + + :return: The links of this LabelResponse. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this LabelResponse. + + :param links: The links of this LabelResponse. 
+ :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LabelResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_update.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_update.py new file mode 100644 index 0000000..c1a7c7e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/label_update.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LabelUpdate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'properties': 'dict(str, str)' + } + + attribute_map = { + 'name': 'name', + 'properties': 'properties' + } + + def __init__(self, name=None, properties=None): # noqa: E501,D401,D403 + """LabelUpdate - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._properties = None + self.discriminator = None + + if name is not None: + self.name = name + if properties is not None: + self.properties = properties + + @property + def name(self): + """Get the name of this LabelUpdate. + + :return: The name of this LabelUpdate. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this LabelUpdate. + + :param name: The name of this LabelUpdate. + :type: str + """ # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this LabelUpdate. + + :return: The properties of this LabelUpdate. + :rtype: dict(str, str) + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this LabelUpdate. + + :param properties: The properties of this LabelUpdate. 
+ :type: dict(str, str) + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LabelUpdate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/labels_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/labels_response.py new file mode 100644 index 0000000..fee2fdc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/labels_response.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LabelsResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'labels': 'list[Label]', + 'links': 'Links' + } + + attribute_map = { + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, labels=None, links=None): # noqa: E501,D401,D403 + """LabelsResponse - a model defined in OpenAPI.""" # noqa: E501 + self._labels = None + self._links = None + self.discriminator = None + + if labels is not None: + self.labels = labels + if links is not None: + self.links = links + + @property + def labels(self): + """Get the labels of this LabelsResponse. + + :return: The labels of this LabelsResponse. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this LabelsResponse. + + :param labels: The labels of this LabelsResponse. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def links(self): + """Get the links of this LabelsResponse. + + :return: The links of this LabelsResponse. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this LabelsResponse. + + :param links: The links of this LabelsResponse. 
+ :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LabelsResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/language_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/language_request.py new file mode 100644 index 0000000..083e70a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/language_request.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LanguageRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'query': 'str' + } + + attribute_map = { + 'query': 'query' + } + + def __init__(self, query=None): # noqa: E501,D401,D403 + """LanguageRequest - a model defined in OpenAPI.""" # noqa: E501 + self._query = None + self.discriminator = None + + self.query = query + + @property + def query(self): + """Get the query of this LanguageRequest. + + The Flux query script to be analyzed. + + :return: The query of this LanguageRequest. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this LanguageRequest. + + The Flux query script to be analyzed. + + :param query: The query of this LanguageRequest. 
+ :type: str + """ # noqa: E501 + if query is None: + raise ValueError("Invalid value for `query`, must not be `None`") # noqa: E501 + self._query = query + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LanguageRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/legacy_authorization_post_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/legacy_authorization_post_request.py new file mode 100644 index 0000000..2976671 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/legacy_authorization_post_request.py @@ -0,0 +1,200 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + + +class LegacyAuthorizationPostRequest(AuthorizationUpdateRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'org_id': 'str', + 'user_id': 'str', + 'token': 'str', + 'permissions': 'list[Permission]', + 'status': 'str', + 'description': 'str' + } + + attribute_map = { + 'org_id': 'orgID', + 'user_id': 'userID', + 'token': 'token', + 'permissions': 'permissions', + 'status': 'status', + 'description': 'description' + } + + def __init__(self, org_id=None, user_id=None, token=None, permissions=None, status='active', description=None): # noqa: E501,D401,D403 + """LegacyAuthorizationPostRequest - a model defined in OpenAPI.""" # noqa: E501 + AuthorizationUpdateRequest.__init__(self, status=status, description=description) # noqa: E501 + + self._org_id = None + self._user_id = None + self._token = None + self._permissions = None + self.discriminator = None + + if org_id is not None: + self.org_id = org_id + if user_id is not None: + self.user_id = user_id + if token is not None: + self.token = token + if permissions is not None: + self.permissions = permissions + + @property + def org_id(self): + """Get the org_id of this LegacyAuthorizationPostRequest. 
+ + ID of org that authorization is scoped to. + + :return: The org_id of this LegacyAuthorizationPostRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this LegacyAuthorizationPostRequest. + + ID of org that authorization is scoped to. + + :param org_id: The org_id of this LegacyAuthorizationPostRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def user_id(self): + """Get the user_id of this LegacyAuthorizationPostRequest. + + ID of user that authorization is scoped to. + + :return: The user_id of this LegacyAuthorizationPostRequest. + :rtype: str + """ # noqa: E501 + return self._user_id + + @user_id.setter + def user_id(self, user_id): + """Set the user_id of this LegacyAuthorizationPostRequest. + + ID of user that authorization is scoped to. + + :param user_id: The user_id of this LegacyAuthorizationPostRequest. + :type: str + """ # noqa: E501 + self._user_id = user_id + + @property + def token(self): + """Get the token of this LegacyAuthorizationPostRequest. + + Token (name) of the authorization + + :return: The token of this LegacyAuthorizationPostRequest. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this LegacyAuthorizationPostRequest. + + Token (name) of the authorization + + :param token: The token of this LegacyAuthorizationPostRequest. + :type: str + """ # noqa: E501 + self._token = token + + @property + def permissions(self): + """Get the permissions of this LegacyAuthorizationPostRequest. + + List of permissions for an auth. An auth must have at least one Permission. + + :return: The permissions of this LegacyAuthorizationPostRequest. + :rtype: list[Permission] + """ # noqa: E501 + return self._permissions + + @permissions.setter + def permissions(self, permissions): + """Set the permissions of this LegacyAuthorizationPostRequest. + + List of permissions for an auth. An auth must have at least one Permission. + + :param permissions: The permissions of this LegacyAuthorizationPostRequest. 
+ :type: list[Permission] + """ # noqa: E501 + self._permissions = permissions + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LegacyAuthorizationPostRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/lesser_threshold.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/lesser_threshold.py new file mode 100644 index 0000000..a75a19e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/lesser_threshold.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.threshold_base import ThresholdBase + + +class LesserThreshold(ThresholdBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'float', + 'level': 'CheckStatusLevel', + 'all_values': 'bool' + } + + attribute_map = { + 'type': 'type', + 'value': 'value', + 'level': 'level', + 'all_values': 'allValues' + } + + def __init__(self, type="lesser", value=None, level=None, all_values=None): # noqa: E501,D401,D403 + """LesserThreshold - a model defined in OpenAPI.""" # noqa: E501 + ThresholdBase.__init__(self, level=level, all_values=all_values) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + self.type = type + self.value = value + + @property + def type(self): + """Get the type of this LesserThreshold. + + :return: The type of this LesserThreshold. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this LesserThreshold. + + :param type: The type of this LesserThreshold. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this LesserThreshold. + + :return: The value of this LesserThreshold. 
+ :rtype: float + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this LesserThreshold. + + :param value: The value of this LesserThreshold. + :type: float + """ # noqa: E501 + if value is None: + raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LesserThreshold): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_plus_single_stat_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_plus_single_stat_properties.py new file mode 100644 index 0000000..5196ba0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_plus_single_stat_properties.py @@ -0,0 +1,797 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class LinePlusSingleStatProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'axes': 'Axes', + 'static_legend': 'StaticLegend', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_column': 'str', + 'generate_y_axis_ticks': 'list[str]', + 'y_total_ticks': 'int', + 'y_tick_start': 'float', + 'y_tick_step': 'float', + 'shade_below': 'bool', + 'hover_dimension': 'str', + 'position': 'str', + 'prefix': 'str', + 'suffix': 'str', + 'decimal_places': 'DecimalPlaces', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'axes': 'axes', + 'static_legend': 'staticLegend', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_column': 'yColumn', + 'generate_y_axis_ticks': 'generateYAxisTicks', + 'y_total_ticks': 'yTotalTicks', + 'y_tick_start': 'yTickStart', + 'y_tick_step': 'yTickStep', + 'shade_below': 'shadeBelow', + 'hover_dimension': 'hoverDimension', + 'position': 'position', + 'prefix': 'prefix', + 'suffix': 'suffix', + 'decimal_places': 'decimalPlaces', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, time_format=None, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, axes=None, static_legend=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_column=None, generate_y_axis_ticks=None, y_total_ticks=None, y_tick_start=None, y_tick_step=None, shade_below=None, hover_dimension=None, position=None, prefix=None, suffix=None, decimal_places=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """LinePlusSingleStatProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._axes = None + self._static_legend = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_column = None + self._generate_y_axis_ticks = None + self._y_total_ticks = None + self._y_tick_start = None + self._y_tick_step = None + self._shade_below = None + self._hover_dimension = None + self._position = None + self._prefix = None + self._suffix = None + self._decimal_places = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide = adaptive_zoom_hide + if 
time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.axes = axes + if static_legend is not None: + self.static_legend = static_legend + if x_column is not None: + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + if y_column is not None: + self.y_column = y_column + if generate_y_axis_ticks is not None: + self.generate_y_axis_ticks = generate_y_axis_ticks + if y_total_ticks is not None: + self.y_total_ticks = y_total_ticks + if y_tick_start is not None: + self.y_tick_start = y_tick_start + if y_tick_step is not None: + self.y_tick_step = y_tick_step + if shade_below is not None: + self.shade_below = shade_below + if hover_dimension is not None: + self.hover_dimension = hover_dimension + self.position = position + self.prefix = prefix + self.suffix = suffix + self.decimal_places = decimal_places + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this LinePlusSingleStatProperties. + + :return: The adaptive_zoom_hide of this LinePlusSingleStatProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this LinePlusSingleStatProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this LinePlusSingleStatProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def time_format(self): + """Get the time_format of this LinePlusSingleStatProperties. + + :return: The time_format of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this LinePlusSingleStatProperties. + + :param time_format: The time_format of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this LinePlusSingleStatProperties. + + :return: The type of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this LinePlusSingleStatProperties. + + :param type: The type of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this LinePlusSingleStatProperties. + + :return: The queries of this LinePlusSingleStatProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this LinePlusSingleStatProperties. 
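As the constructor above shows, the generator splits fields into required and optional: required ones (type, queries, colors, shape, note, show_note_when_empty, axes, position, prefix, suffix, decimal_places) are assigned unconditionally and their setters reject None, while the rest are only assigned when a non-None argument is given. A minimal sketch of what that enforcement looks like at construction time:

from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties

try:
    # With no arguments, every required setter receives None; `type` is
    # assigned first, so its setter is the one that raises.
    LinePlusSingleStatProperties()
except ValueError as exc:
    print(exc)  # Invalid value for `type`, must not be `None`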
+ + :param queries: The queries of this LinePlusSingleStatProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this LinePlusSingleStatProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this LinePlusSingleStatProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this LinePlusSingleStatProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this LinePlusSingleStatProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this LinePlusSingleStatProperties. + + :return: The shape of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this LinePlusSingleStatProperties. + + :param shape: The shape of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this LinePlusSingleStatProperties. + + :return: The note of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this LinePlusSingleStatProperties. + + :param note: The note of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this LinePlusSingleStatProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this LinePlusSingleStatProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this LinePlusSingleStatProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this LinePlusSingleStatProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def axes(self): + """Get the axes of this LinePlusSingleStatProperties. + + :return: The axes of this LinePlusSingleStatProperties. + :rtype: Axes + """ # noqa: E501 + return self._axes + + @axes.setter + def axes(self, axes): + """Set the axes of this LinePlusSingleStatProperties. + + :param axes: The axes of this LinePlusSingleStatProperties. + :type: Axes + """ # noqa: E501 + if axes is None: + raise ValueError("Invalid value for `axes`, must not be `None`") # noqa: E501 + self._axes = axes + + @property + def static_legend(self): + """Get the static_legend of this LinePlusSingleStatProperties. + + :return: The static_legend of this LinePlusSingleStatProperties. 
+ :rtype: StaticLegend + """ # noqa: E501 + return self._static_legend + + @static_legend.setter + def static_legend(self, static_legend): + """Set the static_legend of this LinePlusSingleStatProperties. + + :param static_legend: The static_legend of this LinePlusSingleStatProperties. + :type: StaticLegend + """ # noqa: E501 + self._static_legend = static_legend + + @property + def x_column(self): + """Get the x_column of this LinePlusSingleStatProperties. + + :return: The x_column of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this LinePlusSingleStatProperties. + + :param x_column: The x_column of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this LinePlusSingleStatProperties. + + :return: The generate_x_axis_ticks of this LinePlusSingleStatProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this LinePlusSingleStatProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this LinePlusSingleStatProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this LinePlusSingleStatProperties. + + :return: The x_total_ticks of this LinePlusSingleStatProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this LinePlusSingleStatProperties. + + :param x_total_ticks: The x_total_ticks of this LinePlusSingleStatProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this LinePlusSingleStatProperties. + + :return: The x_tick_start of this LinePlusSingleStatProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this LinePlusSingleStatProperties. + + :param x_tick_start: The x_tick_start of this LinePlusSingleStatProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this LinePlusSingleStatProperties. + + :return: The x_tick_step of this LinePlusSingleStatProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this LinePlusSingleStatProperties. + + :param x_tick_step: The x_tick_step of this LinePlusSingleStatProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_column(self): + """Get the y_column of this LinePlusSingleStatProperties. + + :return: The y_column of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._y_column + + @y_column.setter + def y_column(self, y_column): + """Set the y_column of this LinePlusSingleStatProperties. + + :param y_column: The y_column of this LinePlusSingleStatProperties. 
+ :type: str + """ # noqa: E501 + self._y_column = y_column + + @property + def generate_y_axis_ticks(self): + """Get the generate_y_axis_ticks of this LinePlusSingleStatProperties. + + :return: The generate_y_axis_ticks of this LinePlusSingleStatProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_y_axis_ticks + + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks): + """Set the generate_y_axis_ticks of this LinePlusSingleStatProperties. + + :param generate_y_axis_ticks: The generate_y_axis_ticks of this LinePlusSingleStatProperties. + :type: list[str] + """ # noqa: E501 + self._generate_y_axis_ticks = generate_y_axis_ticks + + @property + def y_total_ticks(self): + """Get the y_total_ticks of this LinePlusSingleStatProperties. + + :return: The y_total_ticks of this LinePlusSingleStatProperties. + :rtype: int + """ # noqa: E501 + return self._y_total_ticks + + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks): + """Set the y_total_ticks of this LinePlusSingleStatProperties. + + :param y_total_ticks: The y_total_ticks of this LinePlusSingleStatProperties. + :type: int + """ # noqa: E501 + self._y_total_ticks = y_total_ticks + + @property + def y_tick_start(self): + """Get the y_tick_start of this LinePlusSingleStatProperties. + + :return: The y_tick_start of this LinePlusSingleStatProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_start + + @y_tick_start.setter + def y_tick_start(self, y_tick_start): + """Set the y_tick_start of this LinePlusSingleStatProperties. + + :param y_tick_start: The y_tick_start of this LinePlusSingleStatProperties. + :type: float + """ # noqa: E501 + self._y_tick_start = y_tick_start + + @property + def y_tick_step(self): + """Get the y_tick_step of this LinePlusSingleStatProperties. + + :return: The y_tick_step of this LinePlusSingleStatProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_step + + @y_tick_step.setter + def y_tick_step(self, y_tick_step): + """Set the y_tick_step of this LinePlusSingleStatProperties. + + :param y_tick_step: The y_tick_step of this LinePlusSingleStatProperties. + :type: float + """ # noqa: E501 + self._y_tick_step = y_tick_step + + @property + def shade_below(self): + """Get the shade_below of this LinePlusSingleStatProperties. + + :return: The shade_below of this LinePlusSingleStatProperties. + :rtype: bool + """ # noqa: E501 + return self._shade_below + + @shade_below.setter + def shade_below(self, shade_below): + """Set the shade_below of this LinePlusSingleStatProperties. + + :param shade_below: The shade_below of this LinePlusSingleStatProperties. + :type: bool + """ # noqa: E501 + self._shade_below = shade_below + + @property + def hover_dimension(self): + """Get the hover_dimension of this LinePlusSingleStatProperties. + + :return: The hover_dimension of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._hover_dimension + + @hover_dimension.setter + def hover_dimension(self, hover_dimension): + """Set the hover_dimension of this LinePlusSingleStatProperties. + + :param hover_dimension: The hover_dimension of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + self._hover_dimension = hover_dimension + + @property + def position(self): + """Get the position of this LinePlusSingleStatProperties. + + :return: The position of this LinePlusSingleStatProperties. 
+ :rtype: str + """ # noqa: E501 + return self._position + + @position.setter + def position(self, position): + """Set the position of this LinePlusSingleStatProperties. + + :param position: The position of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if position is None: + raise ValueError("Invalid value for `position`, must not be `None`") # noqa: E501 + self._position = position + + @property + def prefix(self): + """Get the prefix of this LinePlusSingleStatProperties. + + :return: The prefix of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._prefix + + @prefix.setter + def prefix(self, prefix): + """Set the prefix of this LinePlusSingleStatProperties. + + :param prefix: The prefix of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if prefix is None: + raise ValueError("Invalid value for `prefix`, must not be `None`") # noqa: E501 + self._prefix = prefix + + @property + def suffix(self): + """Get the suffix of this LinePlusSingleStatProperties. + + :return: The suffix of this LinePlusSingleStatProperties. + :rtype: str + """ # noqa: E501 + return self._suffix + + @suffix.setter + def suffix(self, suffix): + """Set the suffix of this LinePlusSingleStatProperties. + + :param suffix: The suffix of this LinePlusSingleStatProperties. + :type: str + """ # noqa: E501 + if suffix is None: + raise ValueError("Invalid value for `suffix`, must not be `None`") # noqa: E501 + self._suffix = suffix + + @property + def decimal_places(self): + """Get the decimal_places of this LinePlusSingleStatProperties. + + :return: The decimal_places of this LinePlusSingleStatProperties. + :rtype: DecimalPlaces + """ # noqa: E501 + return self._decimal_places + + @decimal_places.setter + def decimal_places(self, decimal_places): + """Set the decimal_places of this LinePlusSingleStatProperties. + + :param decimal_places: The decimal_places of this LinePlusSingleStatProperties. + :type: DecimalPlaces + """ # noqa: E501 + if decimal_places is None: + raise ValueError("Invalid value for `decimal_places`, must not be `None`") # noqa: E501 + self._decimal_places = decimal_places + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this LinePlusSingleStatProperties. + + :return: The legend_colorize_rows of this LinePlusSingleStatProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this LinePlusSingleStatProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this LinePlusSingleStatProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this LinePlusSingleStatProperties. + + :return: The legend_hide of this LinePlusSingleStatProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this LinePlusSingleStatProperties. + + :param legend_hide: The legend_hide of this LinePlusSingleStatProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this LinePlusSingleStatProperties. + + :return: The legend_opacity of this LinePlusSingleStatProperties. 
+ :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this LinePlusSingleStatProperties. + + :param legend_opacity: The legend_opacity of this LinePlusSingleStatProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this LinePlusSingleStatProperties. + + :return: The legend_orientation_threshold of this LinePlusSingleStatProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this LinePlusSingleStatProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this LinePlusSingleStatProperties. + :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LinePlusSingleStatProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_error.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_error.py new file mode 100644 index 0000000..2050a8b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_error.py @@ -0,0 +1,220 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LineProtocolError(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'code': 'str', + 'message': 'str', + 'op': 'str', + 'err': 'str', + 'line': 'int' + } + + attribute_map = { + 'code': 'code', + 'message': 'message', + 'op': 'op', + 'err': 'err', + 'line': 'line' + } + + def __init__(self, code=None, message=None, op=None, err=None, line=None): # noqa: E501,D401,D403 + """LineProtocolError - a model defined in OpenAPI.""" # noqa: E501 + self._code = None + self._message = None + self._op = None + self._err = None + self._line = None + self.discriminator = None + + self.code = code + if message is not None: + self.message = message + if op is not None: + self.op = op + if err is not None: + self.err = err + if line is not None: + self.line = line + + @property + def code(self): + """Get the code of this LineProtocolError. + + Code is the machine-readable error code. + + :return: The code of this LineProtocolError. + :rtype: str + """ # noqa: E501 + return self._code + + @code.setter + def code(self, code): + """Set the code of this LineProtocolError. + + Code is the machine-readable error code. + + :param code: The code of this LineProtocolError. + :type: str + """ # noqa: E501 + if code is None: + raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501 + self._code = code + + @property + def message(self): + """Get the message of this LineProtocolError. + + Human-readable message. + + :return: The message of this LineProtocolError. + :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this LineProtocolError. + + Human-readable message. + + :param message: The message of this LineProtocolError. + :type: str + """ # noqa: E501 + self._message = message + + @property + def op(self): + """Get the op of this LineProtocolError. + + Describes the logical code operation when the error occurred. Useful for debugging. + + :return: The op of this LineProtocolError. + :rtype: str + """ # noqa: E501 + return self._op + + @op.setter + def op(self, op): + """Set the op of this LineProtocolError. + + Describes the logical code operation when the error occurred. Useful for debugging. + + :param op: The op of this LineProtocolError. + :type: str + """ # noqa: E501 + self._op = op + + @property + def err(self): + """Get the err of this LineProtocolError. + + Stack of errors that occurred during processing of the request. Useful for debugging. + + :return: The err of this LineProtocolError. + :rtype: str + """ # noqa: E501 + return self._err + + @err.setter + def err(self, err): + """Set the err of this LineProtocolError. + + Stack of errors that occurred during processing of the request. Useful for debugging. + + :param err: The err of this LineProtocolError. + :type: str + """ # noqa: E501 + self._err = err + + @property + def line(self): + """Get the line of this LineProtocolError. + + First line in the request body that contains malformed data. + + :return: The line of this LineProtocolError. + :rtype: int + """ # noqa: E501 + return self._line + + @line.setter + def line(self, line): + """Set the line of this LineProtocolError. + + First line in the request body that contains malformed data. + + :param line: The line of this LineProtocolError. 
+ :type: int + """ # noqa: E501 + self._line = line + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LineProtocolError): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_length_error.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_length_error.py new file mode 100644 index 0000000..d40aa6a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/line_protocol_length_error.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LineProtocolLengthError(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'code': 'str', + 'message': 'str' + } + + attribute_map = { + 'code': 'code', + 'message': 'message' + } + + def __init__(self, code=None, message=None): # noqa: E501,D401,D403 + """LineProtocolLengthError - a model defined in OpenAPI.""" # noqa: E501 + self._code = None + self._message = None + self.discriminator = None + + self.code = code + self.message = message + + @property + def code(self): + """Get the code of this LineProtocolLengthError. + + Code is the machine-readable error code. + + :return: The code of this LineProtocolLengthError. + :rtype: str + """ # noqa: E501 + return self._code + + @code.setter + def code(self, code): + """Set the code of this LineProtocolLengthError. + + Code is the machine-readable error code. + + :param code: The code of this LineProtocolLengthError. + :type: str + """ # noqa: E501 + if code is None: + raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501 + self._code = code + + @property + def message(self): + """Get the message of this LineProtocolLengthError. + + Human-readable message. + + :return: The message of this LineProtocolLengthError. + :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this LineProtocolLengthError. + + Human-readable message. 
+ + :param message: The message of this LineProtocolLengthError. + :type: str + """ # noqa: E501 + if message is None: + raise ValueError("Invalid value for `message`, must not be `None`") # noqa: E501 + self._message = message + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LineProtocolLengthError): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/links.py new file mode 100644 index 0000000..d9844c8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/links.py @@ -0,0 +1,166 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Links(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'next': 'str', + '_self': 'str', + 'prev': 'str' + } + + attribute_map = { + 'next': 'next', + '_self': 'self', + 'prev': 'prev' + } + + def __init__(self, next=None, _self=None, prev=None): # noqa: E501,D401,D403 + """Links - a model defined in OpenAPI.""" # noqa: E501 + self._next = None + self.__self = None + self._prev = None + self.discriminator = None + + if next is not None: + self.next = next + self._self = _self + if prev is not None: + self.prev = prev + + @property + def next(self): + """Get the next of this Links. + + URI of resource. + + :return: The next of this Links. + :rtype: str + """ # noqa: E501 + return self._next + + @next.setter + def next(self, next): + """Set the next of this Links. + + URI of resource. + + :param next: The next of this Links. + :type: str + """ # noqa: E501 + self._next = next + + @property + def _self(self): + """Get the _self of this Links. + + URI of resource. + + :return: The _self of this Links. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this Links. + + URI of resource. + + :param _self: The _self of this Links. 
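The JSON key here is `self`, which Python reserves for the instance argument, so the generator maps it to the `_self` attribute (see attribute_map above), backed by the name-mangled `__self` slot; it is also the only required link. A short sketch with illustrative URIs:

from influxdb_client.domain.links import Links

links = Links(_self="/api/v2/buckets?limit=20", next="/api/v2/buckets?after=abc&limit=20")
# Note that to_dict keys by attribute name, not JSON key, so '_self' appears as-is:
print(links.to_dict())
# {'next': '/api/v2/buckets?after=abc&limit=20', '_self': '/api/v2/buckets?limit=20', 'prev': None}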
+ :type: str + """ # noqa: E501 + if _self is None: + raise ValueError("Invalid value for `_self`, must not be `None`") # noqa: E501 + self.__self = _self + + @property + def prev(self): + """Get the prev of this Links. + + URI of resource. + + :return: The prev of this Links. + :rtype: str + """ # noqa: E501 + return self._prev + + @prev.setter + def prev(self, prev): + """Set the prev of this Links. + + URI of resource. + + :param prev: The prev of this Links. + :type: str + """ # noqa: E501 + self._prev = prev + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Links): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/list_stacks_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/list_stacks_response.py new file mode 100644 index 0000000..084e340 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/list_stacks_response.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ListStacksResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'stacks': 'list[Stack]' + } + + attribute_map = { + 'stacks': 'stacks' + } + + def __init__(self, stacks=None): # noqa: E501,D401,D403 + """ListStacksResponse - a model defined in OpenAPI.""" # noqa: E501 + self._stacks = None + self.discriminator = None + + if stacks is not None: + self.stacks = stacks + + @property + def stacks(self): + """Get the stacks of this ListStacksResponse. + + :return: The stacks of this ListStacksResponse. + :rtype: list[Stack] + """ # noqa: E501 + return self._stacks + + @stacks.setter + def stacks(self, stacks): + """Set the stacks of this ListStacksResponse. + + :param stacks: The stacks of this ListStacksResponse. 
+ :type: list[Stack] + """ # noqa: E501 + self._stacks = stacks + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ListStacksResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/log_event.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/log_event.py new file mode 100644 index 0000000..336736e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/log_event.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class LogEvent(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'time': 'datetime', + 'message': 'str', + 'run_id': 'str' + } + + attribute_map = { + 'time': 'time', + 'message': 'message', + 'run_id': 'runID' + } + + def __init__(self, time=None, message=None, run_id=None): # noqa: E501,D401,D403 + """LogEvent - a model defined in OpenAPI.""" # noqa: E501 + self._time = None + self._message = None + self._run_id = None + self.discriminator = None + + if time is not None: + self.time = time + if message is not None: + self.message = message + if run_id is not None: + self.run_id = run_id + + @property + def time(self): + """Get the time of this LogEvent. + + The time ([RFC3339Nano date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339nano-timestamp)) that the event occurred. + + :return: The time of this LogEvent. + :rtype: datetime + """ # noqa: E501 + return self._time + + @time.setter + def time(self, time): + """Set the time of this LogEvent. + + The time ([RFC3339Nano date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339nano-timestamp)) that the event occurred. + + :param time: The time of this LogEvent. + :type: datetime + """ # noqa: E501 + self._time = time + + @property + def message(self): + """Get the message of this LogEvent. + + A description of the event that occurred. + + :return: The message of this LogEvent. 
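All three LogEvent fields are optional and are typically filled in by the API, but the model can be built by hand for tests; `time` is typed as datetime in openapi_types. A sketch with illustrative values:

from datetime import datetime, timezone
from influxdb_client.domain.log_event import LogEvent

event = LogEvent(
    time=datetime.now(timezone.utc),
    message="Completed successfully",
    run_id="09a1b2c3d4e5f601",  # illustrative run ID
)
print(event.run_id)  # the Python attribute is run_id; the JSON key is runID (see attribute_map)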
+ :rtype: str + """ # noqa: E501 + return self._message + + @message.setter + def message(self, message): + """Set the message of this LogEvent. + + A description of the event that occurred. + + :param message: The message of this LogEvent. + :type: str + """ # noqa: E501 + self._message = message + + @property + def run_id(self): + """Get the run_id of this LogEvent. + + The ID of the task run that generated the event. + + :return: The run_id of this LogEvent. + :rtype: str + """ # noqa: E501 + return self._run_id + + @run_id.setter + def run_id(self, run_id): + """Set the run_id of this LogEvent. + + The ID of the task run that generated the event. + + :param run_id: The run_id of this LogEvent. + :type: str + """ # noqa: E501 + self._run_id = run_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LogEvent): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/logical_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/logical_expression.py new file mode 100644 index 0000000..25a1aa9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/logical_expression.py @@ -0,0 +1,184 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class LogicalExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'operator': 'str', + 'left': 'Expression', + 'right': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'operator': 'operator', + 'left': 'left', + 'right': 'right' + } + + def __init__(self, type=None, operator=None, left=None, right=None): # noqa: E501,D401,D403 + """LogicalExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._operator = None + self._left = None + self._right = None + self.discriminator = None + + if type is not None: + self.type = type + if operator is not None: + self.operator = operator + if left is not None: + self.left = left + if right is not None: + self.right = right + + @property + def type(self): + """Get the type of this LogicalExpression. + + Type of AST node + + :return: The type of this LogicalExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this LogicalExpression. + + Type of AST node + + :param type: The type of this LogicalExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def operator(self): + """Get the operator of this LogicalExpression. + + :return: The operator of this LogicalExpression. + :rtype: str + """ # noqa: E501 + return self._operator + + @operator.setter + def operator(self, operator): + """Set the operator of this LogicalExpression. + + :param operator: The operator of this LogicalExpression. + :type: str + """ # noqa: E501 + self._operator = operator + + @property + def left(self): + """Get the left of this LogicalExpression. + + :return: The left of this LogicalExpression. + :rtype: Expression + """ # noqa: E501 + return self._left + + @left.setter + def left(self, left): + """Set the left of this LogicalExpression. + + :param left: The left of this LogicalExpression. + :type: Expression + """ # noqa: E501 + self._left = left + + @property + def right(self): + """Get the right of this LogicalExpression. + + :return: The right of this LogicalExpression. + :rtype: Expression + """ # noqa: E501 + return self._right + + @right.setter + def right(self, right): + """Set the right of this LogicalExpression. + + :param right: The right of this LogicalExpression. 
+ :type: Expression + """ # noqa: E501 + self._right = right + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, LogicalExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/logs.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/logs.py new file mode 100644 index 0000000..0dd2acf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/logs.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Logs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'events': 'list[LogEvent]' + } + + attribute_map = { + 'events': 'events' + } + + def __init__(self, events=None): # noqa: E501,D401,D403 + """Logs - a model defined in OpenAPI.""" # noqa: E501 + self._events = None + self.discriminator = None + + if events is not None: + self.events = events + + @property + def events(self): + """Get the events of this Logs. + + :return: The events of this Logs. + :rtype: list[LogEvent] + """ # noqa: E501 + return self._events + + @events.setter + def events(self, events): + """Set the events of this Logs. + + :param events: The events of this Logs. 
+ :type: list[LogEvent] + """ # noqa: E501 + self._events = events + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Logs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/map_variable_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/map_variable_properties.py new file mode 100644 index 0000000..8648d9c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/map_variable_properties.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.variable_properties import VariableProperties + + +class MapVariableProperties(VariableProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'values': 'dict(str, str)' + } + + attribute_map = { + 'type': 'type', + 'values': 'values' + } + + def __init__(self, type=None, values=None): # noqa: E501,D401,D403 + """MapVariableProperties - a model defined in OpenAPI.""" # noqa: E501 + VariableProperties.__init__(self) # noqa: E501 + + self._type = None + self._values = None + self.discriminator = None + + if type is not None: + self.type = type + if values is not None: + self.values = values + + @property + def type(self): + """Get the type of this MapVariableProperties. + + :return: The type of this MapVariableProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MapVariableProperties. + + :param type: The type of this MapVariableProperties. + :type: str + """ # noqa: E501 + self._type = type + + @property + def values(self): + """Get the values of this MapVariableProperties. + + :return: The values of this MapVariableProperties. + :rtype: dict(str, str) + """ # noqa: E501 + return self._values + + @values.setter + def values(self, values): + """Set the values of this MapVariableProperties. + + :param values: The values of this MapVariableProperties. 
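`values` is a plain str-to-str mapping, which exercises the dict branch of to_dict below: an entry whose value has a to_dict would be converted, and plain strings pass through untouched. A sketch (the keys and values are illustrative, and "map" follows this API's type-naming convention but is an assumption here):

from influxdb_client.domain.map_variable_properties import MapVariableProperties

props = MapVariableProperties(type="map", values={"us-west": "us-west-prod", "us-east": "us-east-prod"})
print(props.to_dict()["values"])  # {'us-west': 'us-west-prod', 'us-east': 'us-east-prod'}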
+ :type: dict(str, str) + """ # noqa: E501 + self._values = values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MapVariableProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/markdown_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/markdown_view_properties.py new file mode 100644 index 0000000..cc797f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/markdown_view_properties.py @@ -0,0 +1,160 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class MarkdownViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'shape': 'str', + 'note': 'str' + } + + attribute_map = { + 'type': 'type', + 'shape': 'shape', + 'note': 'note' + } + + def __init__(self, type=None, shape=None, note=None): # noqa: E501,D401,D403 + """MarkdownViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._shape = None + self._note = None + self.discriminator = None + + self.type = type + self.shape = shape + self.note = note + + @property + def type(self): + """Get the type of this MarkdownViewProperties. + + :return: The type of this MarkdownViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MarkdownViewProperties. + + :param type: The type of this MarkdownViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def shape(self): + """Get the shape of this MarkdownViewProperties. + + :return: The shape of this MarkdownViewProperties. 
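Unlike most view models in this diff, MarkdownViewProperties has no optional fields: type, shape, and note are all enforced as non-None by their setters. A sketch (the literal "markdown" and "chronograf-v2" values follow this API's usual discriminator and shape conventions but are assumptions here, not values from this diff):

from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties

md = MarkdownViewProperties(type="markdown", shape="chronograf-v2", note="## Dashboard notes")
print(md.to_dict())  # {'type': 'markdown', 'shape': 'chronograf-v2', 'note': '## Dashboard notes'}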
+ :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this MarkdownViewProperties. + + :param shape: The shape of this MarkdownViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this MarkdownViewProperties. + + :return: The note of this MarkdownViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this MarkdownViewProperties. + + :param note: The note of this MarkdownViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MarkdownViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema.py new file mode 100644 index 0000000..f2a48fa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema.py @@ -0,0 +1,262 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MeasurementSchema(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'bucket_id': 'str', + 'name': 'str', + 'columns': 'list[MeasurementSchemaColumn]', + 'created_at': 'datetime', + 'updated_at': 'datetime' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'bucket_id': 'bucketID', + 'name': 'name', + 'columns': 'columns', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt' + } + + def __init__(self, id=None, org_id=None, bucket_id=None, name=None, columns=None, created_at=None, updated_at=None): # noqa: E501,D401,D403 + """MeasurementSchema - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._bucket_id = None + self._name = None + self._columns = None + self._created_at = None + self._updated_at = None + self.discriminator = None + + self.id = id + if org_id is not None: + self.org_id = org_id + if bucket_id is not None: + self.bucket_id = bucket_id + self.name = name + self.columns = columns + self.created_at = created_at + self.updated_at = updated_at + + @property + def id(self): + """Get the id of this MeasurementSchema. + + :return: The id of this MeasurementSchema. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this MeasurementSchema. + + :param id: The id of this MeasurementSchema. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this MeasurementSchema. + + The ID of the organization. + + :return: The org_id of this MeasurementSchema. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this MeasurementSchema. + + The ID of the organization. + + :param org_id: The org_id of this MeasurementSchema. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def bucket_id(self): + """Get the bucket_id of this MeasurementSchema. + + The ID of the bucket that the measurement schema is associated with. + + :return: The bucket_id of this MeasurementSchema. + :rtype: str + """ # noqa: E501 + return self._bucket_id + + @bucket_id.setter + def bucket_id(self, bucket_id): + """Set the bucket_id of this MeasurementSchema. + + The ID of the bucket that the measurement schema is associated with. + + :param bucket_id: The bucket_id of this MeasurementSchema. + :type: str + """ # noqa: E501 + self._bucket_id = bucket_id + + @property + def name(self): + """Get the name of this MeasurementSchema. + + :return: The name of this MeasurementSchema. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this MeasurementSchema. + + :param name: The name of this MeasurementSchema. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def columns(self): + """Get the columns of this MeasurementSchema. + + Ordered collection of column definitions. + + :return: The columns of this MeasurementSchema. + :rtype: list[MeasurementSchemaColumn] + """ # noqa: E501 + return self._columns + + @columns.setter + def columns(self, columns): + """Set the columns of this MeasurementSchema. + + Ordered collection of column definitions. + + :param columns: The columns of this MeasurementSchema. 
+ :type: list[MeasurementSchemaColumn] + """ # noqa: E501 + if columns is None: + raise ValueError("Invalid value for `columns`, must not be `None`") # noqa: E501 + self._columns = columns + + @property + def created_at(self): + """Get the created_at of this MeasurementSchema. + + :return: The created_at of this MeasurementSchema. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this MeasurementSchema. + + :param created_at: The created_at of this MeasurementSchema. + :type: datetime + """ # noqa: E501 + if created_at is None: + raise ValueError("Invalid value for `created_at`, must not be `None`") # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this MeasurementSchema. + + :return: The updated_at of this MeasurementSchema. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this MeasurementSchema. + + :param updated_at: The updated_at of this MeasurementSchema. + :type: datetime + """ # noqa: E501 + if updated_at is None: + raise ValueError("Invalid value for `updated_at`, must not be `None`") # noqa: E501 + self._updated_at = updated_at + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MeasurementSchema): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_column.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_column.py new file mode 100644 index 0000000..79ee384 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_column.py @@ -0,0 +1,155 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MeasurementSchemaColumn(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
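+
+ Example (editor's illustrative sketch; the column names are hypothetical
+ and plain strings stand in for the ColumnSemanticType/ColumnDataType
+ enum values)::
+
+ tag = MeasurementSchemaColumn(name="host", type="tag")
+ field = MeasurementSchemaColumn(name="usage_user", type="field",
+ data_type="float") # data_type is optional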
+ """ + openapi_types = { + 'name': 'str', + 'type': 'ColumnSemanticType', + 'data_type': 'ColumnDataType' + } + + attribute_map = { + 'name': 'name', + 'type': 'type', + 'data_type': 'dataType' + } + + def __init__(self, name=None, type=None, data_type=None): # noqa: E501,D401,D403 + """MeasurementSchemaColumn - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._type = None + self._data_type = None + self.discriminator = None + + self.name = name + self.type = type + if data_type is not None: + self.data_type = data_type + + @property + def name(self): + """Get the name of this MeasurementSchemaColumn. + + :return: The name of this MeasurementSchemaColumn. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this MeasurementSchemaColumn. + + :param name: The name of this MeasurementSchemaColumn. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def type(self): + """Get the type of this MeasurementSchemaColumn. + + :return: The type of this MeasurementSchemaColumn. + :rtype: ColumnSemanticType + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MeasurementSchemaColumn. + + :param type: The type of this MeasurementSchemaColumn. + :type: ColumnSemanticType + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def data_type(self): + """Get the data_type of this MeasurementSchemaColumn. + + :return: The data_type of this MeasurementSchemaColumn. + :rtype: ColumnDataType + """ # noqa: E501 + return self._data_type + + @data_type.setter + def data_type(self, data_type): + """Set the data_type of this MeasurementSchemaColumn. + + :param data_type: The data_type of this MeasurementSchemaColumn. + :type: ColumnDataType + """ # noqa: E501 + self._data_type = data_type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MeasurementSchemaColumn): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_create_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_create_request.py new file mode 100644 index 0000000..1308a2d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_create_request.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MeasurementSchemaCreateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'columns': 'list[MeasurementSchemaColumn]' + } + + attribute_map = { + 'name': 'name', + 'columns': 'columns' + } + + def __init__(self, name=None, columns=None): # noqa: E501,D401,D403 + """MeasurementSchemaCreateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._columns = None + self.discriminator = None + + self.name = name + self.columns = columns + + @property + def name(self): + """Get the name of this MeasurementSchemaCreateRequest. + + The [measurement](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#measurement) name. + + :return: The name of this MeasurementSchemaCreateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this MeasurementSchemaCreateRequest. + + The [measurement](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#measurement) name. + + :param name: The name of this MeasurementSchemaCreateRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def columns(self): + """Get the columns of this MeasurementSchemaCreateRequest. + + Ordered collection of column definitions. + + :return: The columns of this MeasurementSchemaCreateRequest. + :rtype: list[MeasurementSchemaColumn] + """ # noqa: E501 + return self._columns + + @columns.setter + def columns(self, columns): + """Set the columns of this MeasurementSchemaCreateRequest. + + Ordered collection of column definitions. + + :param columns: The columns of this MeasurementSchemaCreateRequest. 
+ :type: list[MeasurementSchemaColumn] + """ # noqa: E501 + if columns is None: + raise ValueError("Invalid value for `columns`, must not be `None`") # noqa: E501 + self._columns = columns + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MeasurementSchemaCreateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_list.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_list.py new file mode 100644 index 0000000..f8a83dd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_list.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MeasurementSchemaList(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'measurement_schemas': 'list[MeasurementSchema]' + } + + attribute_map = { + 'measurement_schemas': 'measurementSchemas' + } + + def __init__(self, measurement_schemas=None): # noqa: E501,D401,D403 + """MeasurementSchemaList - a model defined in OpenAPI.""" # noqa: E501 + self._measurement_schemas = None + self.discriminator = None + + self.measurement_schemas = measurement_schemas + + @property + def measurement_schemas(self): + """Get the measurement_schemas of this MeasurementSchemaList. + + :return: The measurement_schemas of this MeasurementSchemaList. + :rtype: list[MeasurementSchema] + """ # noqa: E501 + return self._measurement_schemas + + @measurement_schemas.setter + def measurement_schemas(self, measurement_schemas): + """Set the measurement_schemas of this MeasurementSchemaList. + + :param measurement_schemas: The measurement_schemas of this MeasurementSchemaList. 
+ :type: list[MeasurementSchema] + """ # noqa: E501 + if measurement_schemas is None: + raise ValueError("Invalid value for `measurement_schemas`, must not be `None`") # noqa: E501 + self._measurement_schemas = measurement_schemas + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MeasurementSchemaList): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_update_request.py new file mode 100644 index 0000000..bdfa19c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/measurement_schema_update_request.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MeasurementSchemaUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'columns': 'list[MeasurementSchemaColumn]' + } + + attribute_map = { + 'columns': 'columns' + } + + def __init__(self, columns=None): # noqa: E501,D401,D403 + """MeasurementSchemaUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._columns = None + self.discriminator = None + + self.columns = columns + + @property + def columns(self): + """Get the columns of this MeasurementSchemaUpdateRequest. + + An ordered collection of column definitions + + :return: The columns of this MeasurementSchemaUpdateRequest. + :rtype: list[MeasurementSchemaColumn] + """ # noqa: E501 + return self._columns + + @columns.setter + def columns(self, columns): + """Set the columns of this MeasurementSchemaUpdateRequest. + + An ordered collection of column definitions + + :param columns: The columns of this MeasurementSchemaUpdateRequest. 
+ :type: list[MeasurementSchemaColumn] + """ # noqa: E501 + if columns is None: + raise ValueError("Invalid value for `columns`, must not be `None`") # noqa: E501 + self._columns = columns + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MeasurementSchemaUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_assignment.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_assignment.py new file mode 100644 index 0000000..7773b0e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_assignment.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class MemberAssignment(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'member': 'MemberExpression', + 'init': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'member': 'member', + 'init': 'init' + } + + def __init__(self, type=None, member=None, init=None): # noqa: E501,D401,D403 + """MemberAssignment - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._member = None + self._init = None + self.discriminator = None + + if type is not None: + self.type = type + if member is not None: + self.member = member + if init is not None: + self.init = init + + @property + def type(self): + """Get the type of this MemberAssignment. + + Type of AST node + + :return: The type of this MemberAssignment. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MemberAssignment. + + Type of AST node + + :param type: The type of this MemberAssignment. + :type: str + """ # noqa: E501 + self._type = type + + @property + def member(self): + """Get the member of this MemberAssignment. + + :return: The member of this MemberAssignment. 
+ :rtype: MemberExpression + """ # noqa: E501 + return self._member + + @member.setter + def member(self, member): + """Set the member of this MemberAssignment. + + :param member: The member of this MemberAssignment. + :type: MemberExpression + """ # noqa: E501 + self._member = member + + @property + def init(self): + """Get the init of this MemberAssignment. + + :return: The init of this MemberAssignment. + :rtype: Expression + """ # noqa: E501 + return self._init + + @init.setter + def init(self, init): + """Set the init of this MemberAssignment. + + :param init: The init of this MemberAssignment. + :type: Expression + """ # noqa: E501 + self._init = init + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MemberAssignment): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_expression.py new file mode 100644 index 0000000..decf2c4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/member_expression.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class MemberExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'object': 'Expression', + '_property': 'PropertyKey' + } + + attribute_map = { + 'type': 'type', + 'object': 'object', + '_property': 'property' + } + + def __init__(self, type=None, object=None, _property=None): # noqa: E501,D401,D403 + """MemberExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._object = None + self.__property = None + self.discriminator = None + + if type is not None: + self.type = type + if object is not None: + self.object = object + if _property is not None: + self._property = _property + + @property + def type(self): + """Get the type of this MemberExpression. 
+ + Type of AST node + + :return: The type of this MemberExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MemberExpression. + + Type of AST node + + :param type: The type of this MemberExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def object(self): + """Get the object of this MemberExpression. + + :return: The object of this MemberExpression. + :rtype: Expression + """ # noqa: E501 + return self._object + + @object.setter + def object(self, object): + """Set the object of this MemberExpression. + + :param object: The object of this MemberExpression. + :type: Expression + """ # noqa: E501 + self._object = object + + @property + def _property(self): + """Get the _property of this MemberExpression. + + :return: The _property of this MemberExpression. + :rtype: PropertyKey + """ # noqa: E501 + return self.__property + + @_property.setter + def _property(self, _property): + """Set the _property of this MemberExpression. + + :param _property: The _property of this MemberExpression. + :type: PropertyKey + """ # noqa: E501 + self.__property = _property + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MemberExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/metadata_backup.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/metadata_backup.py new file mode 100644 index 0000000..ebe498f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/metadata_backup.py @@ -0,0 +1,156 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class MetadataBackup(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
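+
+ All three fields are required; their setters below reject `None`. A
+ sketch (editor's illustration, hypothetical file names)::
+
+ with open("meta.kv", "rb") as kv, open("meta.sql", "rb") as sql:
+     backup = MetadataBackup(kv=kv, sql=sql, buckets=[])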
+ """ + openapi_types = { + 'kv': 'file', + 'sql': 'file', + 'buckets': 'list[BucketMetadataManifest]' + } + + attribute_map = { + 'kv': 'kv', + 'sql': 'sql', + 'buckets': 'buckets' + } + + def __init__(self, kv=None, sql=None, buckets=None): # noqa: E501,D401,D403 + """MetadataBackup - a model defined in OpenAPI.""" # noqa: E501 + self._kv = None + self._sql = None + self._buckets = None + self.discriminator = None + + self.kv = kv + self.sql = sql + self.buckets = buckets + + @property + def kv(self): + """Get the kv of this MetadataBackup. + + :return: The kv of this MetadataBackup. + :rtype: file + """ # noqa: E501 + return self._kv + + @kv.setter + def kv(self, kv): + """Set the kv of this MetadataBackup. + + :param kv: The kv of this MetadataBackup. + :type: file + """ # noqa: E501 + if kv is None: + raise ValueError("Invalid value for `kv`, must not be `None`") # noqa: E501 + self._kv = kv + + @property + def sql(self): + """Get the sql of this MetadataBackup. + + :return: The sql of this MetadataBackup. + :rtype: file + """ # noqa: E501 + return self._sql + + @sql.setter + def sql(self, sql): + """Set the sql of this MetadataBackup. + + :param sql: The sql of this MetadataBackup. + :type: file + """ # noqa: E501 + if sql is None: + raise ValueError("Invalid value for `sql`, must not be `None`") # noqa: E501 + self._sql = sql + + @property + def buckets(self): + """Get the buckets of this MetadataBackup. + + :return: The buckets of this MetadataBackup. + :rtype: list[BucketMetadataManifest] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this MetadataBackup. + + :param buckets: The buckets of this MetadataBackup. + :type: list[BucketMetadataManifest] + """ # noqa: E501 + if buckets is None: + raise ValueError("Invalid value for `buckets`, must not be `None`") # noqa: E501 + self._buckets = buckets + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MetadataBackup): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/model_property.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/model_property.py new file mode 100644 index 0000000..cbdcd24 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/model_property.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ModelProperty(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'key': 'PropertyKey', + 'value': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'key': 'key', + 'value': 'value' + } + + def __init__(self, type=None, key=None, value=None): # noqa: E501,D401,D403 + """ModelProperty - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._key = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if key is not None: + self.key = key + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this ModelProperty. + + Type of AST node + + :return: The type of this ModelProperty. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ModelProperty. + + Type of AST node + + :param type: The type of this ModelProperty. + :type: str + """ # noqa: E501 + self._type = type + + @property + def key(self): + """Get the key of this ModelProperty. + + :return: The key of this ModelProperty. + :rtype: PropertyKey + """ # noqa: E501 + return self._key + + @key.setter + def key(self, key): + """Set the key of this ModelProperty. + + :param key: The key of this ModelProperty. + :type: PropertyKey + """ # noqa: E501 + self._key = key + + @property + def value(self): + """Get the value of this ModelProperty. + + :return: The value of this ModelProperty. + :rtype: Expression + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this ModelProperty. + + :param value: The value of this ModelProperty. 
+ :type: Expression + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ModelProperty): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/mosaic_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/mosaic_view_properties.py new file mode 100644 index 0000000..1ea8897 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/mosaic_view_properties.py @@ -0,0 +1,780 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class MosaicViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
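+
+ Editor's note: the fields assigned unconditionally in `__init__` below
+ (type, queries, colors, shape, note, show_note_when_empty, x_column,
+ the series/fill column selections, domains, axis labels, and
+ prefixes/suffixes) are required and their setters raise `ValueError`
+ on `None`; fields guarded by `if ... is not None` (time_format, tick
+ settings, hover_dimension, legend options) are optional.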
+ """ + openapi_types = { + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[str]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_label_column_separator': 'str', + 'y_label_columns': 'list[str]', + 'y_series_columns': 'list[str]', + 'fill_columns': 'list[str]', + 'x_domain': 'list[float]', + 'y_domain': 'list[float]', + 'x_axis_label': 'str', + 'y_axis_label': 'str', + 'x_prefix': 'str', + 'x_suffix': 'str', + 'y_prefix': 'str', + 'y_suffix': 'str', + 'hover_dimension': 'str', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_label_column_separator': 'yLabelColumnSeparator', + 'y_label_columns': 'yLabelColumns', + 'y_series_columns': 'ySeriesColumns', + 'fill_columns': 'fillColumns', + 'x_domain': 'xDomain', + 'y_domain': 'yDomain', + 'x_axis_label': 'xAxisLabel', + 'y_axis_label': 'yAxisLabel', + 'x_prefix': 'xPrefix', + 'x_suffix': 'xSuffix', + 'y_prefix': 'yPrefix', + 'y_suffix': 'ySuffix', + 'hover_dimension': 'hoverDimension', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, time_format=None, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_label_column_separator=None, y_label_columns=None, y_series_columns=None, fill_columns=None, x_domain=None, y_domain=None, x_axis_label=None, y_axis_label=None, x_prefix=None, x_suffix=None, y_prefix=None, y_suffix=None, hover_dimension=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """MosaicViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_label_column_separator = None + self._y_label_columns = None + self._y_series_columns = None + self._fill_columns = None + self._x_domain = None + self._y_domain = None + self._x_axis_label = None + self._y_axis_label = None + self._x_prefix = None + self._x_suffix = None + self._y_prefix = None + self._y_suffix = None + self._hover_dimension = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + 
self.show_note_when_empty = show_note_when_empty + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + if y_label_column_separator is not None: + self.y_label_column_separator = y_label_column_separator + if y_label_columns is not None: + self.y_label_columns = y_label_columns + self.y_series_columns = y_series_columns + self.fill_columns = fill_columns + self.x_domain = x_domain + self.y_domain = y_domain + self.x_axis_label = x_axis_label + self.y_axis_label = y_axis_label + self.x_prefix = x_prefix + self.x_suffix = x_suffix + self.y_prefix = y_prefix + self.y_suffix = y_suffix + if hover_dimension is not None: + self.hover_dimension = hover_dimension + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def time_format(self): + """Get the time_format of this MosaicViewProperties. + + :return: The time_format of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this MosaicViewProperties. + + :param time_format: The time_format of this MosaicViewProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this MosaicViewProperties. + + :return: The type of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this MosaicViewProperties. + + :param type: The type of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this MosaicViewProperties. + + :return: The queries of this MosaicViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this MosaicViewProperties. + + :param queries: The queries of this MosaicViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this MosaicViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this MosaicViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this MosaicViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this MosaicViewProperties. + :type: list[str] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this MosaicViewProperties. + + :return: The shape of this MosaicViewProperties. 
+ :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this MosaicViewProperties. + + :param shape: The shape of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this MosaicViewProperties. + + :return: The note of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this MosaicViewProperties. + + :param note: The note of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this MosaicViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this MosaicViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this MosaicViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this MosaicViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def x_column(self): + """Get the x_column of this MosaicViewProperties. + + :return: The x_column of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this MosaicViewProperties. + + :param x_column: The x_column of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if x_column is None: + raise ValueError("Invalid value for `x_column`, must not be `None`") # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this MosaicViewProperties. + + :return: The generate_x_axis_ticks of this MosaicViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this MosaicViewProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this MosaicViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this MosaicViewProperties. + + :return: The x_total_ticks of this MosaicViewProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this MosaicViewProperties. + + :param x_total_ticks: The x_total_ticks of this MosaicViewProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this MosaicViewProperties. + + :return: The x_tick_start of this MosaicViewProperties. 
+ :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this MosaicViewProperties. + + :param x_tick_start: The x_tick_start of this MosaicViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this MosaicViewProperties. + + :return: The x_tick_step of this MosaicViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this MosaicViewProperties. + + :param x_tick_step: The x_tick_step of this MosaicViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_label_column_separator(self): + """Get the y_label_column_separator of this MosaicViewProperties. + + :return: The y_label_column_separator of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_label_column_separator + + @y_label_column_separator.setter + def y_label_column_separator(self, y_label_column_separator): + """Set the y_label_column_separator of this MosaicViewProperties. + + :param y_label_column_separator: The y_label_column_separator of this MosaicViewProperties. + :type: str + """ # noqa: E501 + self._y_label_column_separator = y_label_column_separator + + @property + def y_label_columns(self): + """Get the y_label_columns of this MosaicViewProperties. + + :return: The y_label_columns of this MosaicViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._y_label_columns + + @y_label_columns.setter + def y_label_columns(self, y_label_columns): + """Set the y_label_columns of this MosaicViewProperties. + + :param y_label_columns: The y_label_columns of this MosaicViewProperties. + :type: list[str] + """ # noqa: E501 + self._y_label_columns = y_label_columns + + @property + def y_series_columns(self): + """Get the y_series_columns of this MosaicViewProperties. + + :return: The y_series_columns of this MosaicViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._y_series_columns + + @y_series_columns.setter + def y_series_columns(self, y_series_columns): + """Set the y_series_columns of this MosaicViewProperties. + + :param y_series_columns: The y_series_columns of this MosaicViewProperties. + :type: list[str] + """ # noqa: E501 + if y_series_columns is None: + raise ValueError("Invalid value for `y_series_columns`, must not be `None`") # noqa: E501 + self._y_series_columns = y_series_columns + + @property + def fill_columns(self): + """Get the fill_columns of this MosaicViewProperties. + + :return: The fill_columns of this MosaicViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._fill_columns + + @fill_columns.setter + def fill_columns(self, fill_columns): + """Set the fill_columns of this MosaicViewProperties. + + :param fill_columns: The fill_columns of this MosaicViewProperties. + :type: list[str] + """ # noqa: E501 + if fill_columns is None: + raise ValueError("Invalid value for `fill_columns`, must not be `None`") # noqa: E501 + self._fill_columns = fill_columns + + @property + def x_domain(self): + """Get the x_domain of this MosaicViewProperties. + + :return: The x_domain of this MosaicViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._x_domain + + @x_domain.setter + def x_domain(self, x_domain): + """Set the x_domain of this MosaicViewProperties. 
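+
+ Editor's note: inferred from the `list[float]` type, the domain is a
+ two-element `[min, max]` pair; the generator does not document the
+ element order.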
+ + :param x_domain: The x_domain of this MosaicViewProperties. + :type: list[float] + """ # noqa: E501 + if x_domain is None: + raise ValueError("Invalid value for `x_domain`, must not be `None`") # noqa: E501 + self._x_domain = x_domain + + @property + def y_domain(self): + """Get the y_domain of this MosaicViewProperties. + + :return: The y_domain of this MosaicViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._y_domain + + @y_domain.setter + def y_domain(self, y_domain): + """Set the y_domain of this MosaicViewProperties. + + :param y_domain: The y_domain of this MosaicViewProperties. + :type: list[float] + """ # noqa: E501 + if y_domain is None: + raise ValueError("Invalid value for `y_domain`, must not be `None`") # noqa: E501 + self._y_domain = y_domain + + @property + def x_axis_label(self): + """Get the x_axis_label of this MosaicViewProperties. + + :return: The x_axis_label of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_axis_label + + @x_axis_label.setter + def x_axis_label(self, x_axis_label): + """Set the x_axis_label of this MosaicViewProperties. + + :param x_axis_label: The x_axis_label of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if x_axis_label is None: + raise ValueError("Invalid value for `x_axis_label`, must not be `None`") # noqa: E501 + self._x_axis_label = x_axis_label + + @property + def y_axis_label(self): + """Get the y_axis_label of this MosaicViewProperties. + + :return: The y_axis_label of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_axis_label + + @y_axis_label.setter + def y_axis_label(self, y_axis_label): + """Set the y_axis_label of this MosaicViewProperties. + + :param y_axis_label: The y_axis_label of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if y_axis_label is None: + raise ValueError("Invalid value for `y_axis_label`, must not be `None`") # noqa: E501 + self._y_axis_label = y_axis_label + + @property + def x_prefix(self): + """Get the x_prefix of this MosaicViewProperties. + + :return: The x_prefix of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_prefix + + @x_prefix.setter + def x_prefix(self, x_prefix): + """Set the x_prefix of this MosaicViewProperties. + + :param x_prefix: The x_prefix of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if x_prefix is None: + raise ValueError("Invalid value for `x_prefix`, must not be `None`") # noqa: E501 + self._x_prefix = x_prefix + + @property + def x_suffix(self): + """Get the x_suffix of this MosaicViewProperties. + + :return: The x_suffix of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_suffix + + @x_suffix.setter + def x_suffix(self, x_suffix): + """Set the x_suffix of this MosaicViewProperties. + + :param x_suffix: The x_suffix of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if x_suffix is None: + raise ValueError("Invalid value for `x_suffix`, must not be `None`") # noqa: E501 + self._x_suffix = x_suffix + + @property + def y_prefix(self): + """Get the y_prefix of this MosaicViewProperties. + + :return: The y_prefix of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_prefix + + @y_prefix.setter + def y_prefix(self, y_prefix): + """Set the y_prefix of this MosaicViewProperties. + + :param y_prefix: The y_prefix of this MosaicViewProperties. 
+ :type: str + """ # noqa: E501 + if y_prefix is None: + raise ValueError("Invalid value for `y_prefix`, must not be `None`") # noqa: E501 + self._y_prefix = y_prefix + + @property + def y_suffix(self): + """Get the y_suffix of this MosaicViewProperties. + + :return: The y_suffix of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_suffix + + @y_suffix.setter + def y_suffix(self, y_suffix): + """Set the y_suffix of this MosaicViewProperties. + + :param y_suffix: The y_suffix of this MosaicViewProperties. + :type: str + """ # noqa: E501 + if y_suffix is None: + raise ValueError("Invalid value for `y_suffix`, must not be `None`") # noqa: E501 + self._y_suffix = y_suffix + + @property + def hover_dimension(self): + """Get the hover_dimension of this MosaicViewProperties. + + :return: The hover_dimension of this MosaicViewProperties. + :rtype: str + """ # noqa: E501 + return self._hover_dimension + + @hover_dimension.setter + def hover_dimension(self, hover_dimension): + """Set the hover_dimension of this MosaicViewProperties. + + :param hover_dimension: The hover_dimension of this MosaicViewProperties. + :type: str + """ # noqa: E501 + self._hover_dimension = hover_dimension + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this MosaicViewProperties. + + :return: The legend_colorize_rows of this MosaicViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this MosaicViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this MosaicViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this MosaicViewProperties. + + :return: The legend_hide of this MosaicViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this MosaicViewProperties. + + :param legend_hide: The legend_hide of this MosaicViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this MosaicViewProperties. + + :return: The legend_opacity of this MosaicViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this MosaicViewProperties. + + :param legend_opacity: The legend_opacity of this MosaicViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this MosaicViewProperties. + + :return: The legend_orientation_threshold of this MosaicViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this MosaicViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this MosaicViewProperties. 
+ :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, MosaicViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/node.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/node.py new file mode 100644 index 0000000..c254c62 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/node.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Node(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
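+
+ Node declares no attributes of its own (`openapi_types` below is
+ empty), so an instance serializes to an empty dict; editor's sketch::
+
+ Node().to_dict() # -> {}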
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """Node - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Node): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint.py new file mode 100644 index 0000000..3d0a886 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpoint(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'slack': 'SlackNotificationEndpoint', + 'pagerduty': 'PagerDutyNotificationEndpoint', + 'http': 'HTTPNotificationEndpoint', + 'telegram': 'TelegramNotificationEndpoint' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """NotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this NotificationEndpoint. + + :return: The type of this NotificationEndpoint. + :rtype: NotificationEndpointType + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this NotificationEndpoint. + + :param type: The type of this NotificationEndpoint. 
+ :type: NotificationEndpointType + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base.py new file mode 100644 index 0000000..3ac9da0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base.py @@ -0,0 +1,347 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpointBase(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
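+
+    `name` and `type` are required (their setters reject `None`). A minimal
+    illustrative construction, using the plain string value of the endpoint
+    type and an invented endpoint name::
+
+        base = NotificationEndpointBase(name="my-endpoint", type="slack")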
+ """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'user_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'description': 'str', + 'name': 'str', + 'status': 'str', + 'labels': 'list[Label]', + 'links': 'NotificationEndpointBaseLinks', + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'user_id': 'userID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'description': 'description', + 'name': 'name', + 'status': 'status', + 'labels': 'labels', + 'links': 'links', + 'type': 'type' + } + + def __init__(self, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type=None): # noqa: E501,D401,D403 + """NotificationEndpointBase - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._user_id = None + self._created_at = None + self._updated_at = None + self._description = None + self._name = None + self._status = None + self._labels = None + self._links = None + self._type = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if user_id is not None: + self.user_id = user_id + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + if description is not None: + self.description = description + self.name = name + if status is not None: + self.status = status + if labels is not None: + self.labels = labels + if links is not None: + self.links = links + self.type = type + + @property + def id(self): + """Get the id of this NotificationEndpointBase. + + :return: The id of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this NotificationEndpointBase. + + :param id: The id of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this NotificationEndpointBase. + + :return: The org_id of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this NotificationEndpointBase. + + :param org_id: The org_id of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def user_id(self): + """Get the user_id of this NotificationEndpointBase. + + :return: The user_id of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._user_id + + @user_id.setter + def user_id(self, user_id): + """Set the user_id of this NotificationEndpointBase. + + :param user_id: The user_id of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + self._user_id = user_id + + @property + def created_at(self): + """Get the created_at of this NotificationEndpointBase. + + :return: The created_at of this NotificationEndpointBase. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this NotificationEndpointBase. + + :param created_at: The created_at of this NotificationEndpointBase. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this NotificationEndpointBase. + + :return: The updated_at of this NotificationEndpointBase. 
+ :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this NotificationEndpointBase. + + :param updated_at: The updated_at of this NotificationEndpointBase. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def description(self): + """Get the description of this NotificationEndpointBase. + + An optional description of the notification endpoint. + + :return: The description of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this NotificationEndpointBase. + + An optional description of the notification endpoint. + + :param description: The description of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + self._description = description + + @property + def name(self): + """Get the name of this NotificationEndpointBase. + + :return: The name of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this NotificationEndpointBase. + + :param name: The name of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def status(self): + """Get the status of this NotificationEndpointBase. + + The status of the endpoint. + + :return: The status of this NotificationEndpointBase. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this NotificationEndpointBase. + + The status of the endpoint. + + :param status: The status of this NotificationEndpointBase. + :type: str + """ # noqa: E501 + self._status = status + + @property + def labels(self): + """Get the labels of this NotificationEndpointBase. + + :return: The labels of this NotificationEndpointBase. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this NotificationEndpointBase. + + :param labels: The labels of this NotificationEndpointBase. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def links(self): + """Get the links of this NotificationEndpointBase. + + :return: The links of this NotificationEndpointBase. + :rtype: NotificationEndpointBaseLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this NotificationEndpointBase. + + :param links: The links of this NotificationEndpointBase. + :type: NotificationEndpointBaseLinks + """ # noqa: E501 + self._links = links + + @property + def type(self): + """Get the type of this NotificationEndpointBase. + + :return: The type of this NotificationEndpointBase. + :rtype: NotificationEndpointType + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this NotificationEndpointBase. + + :param type: The type of this NotificationEndpointBase. 
+ :type: NotificationEndpointType + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpointBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base_links.py new file mode 100644 index 0000000..4ff93cc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_base_links.py @@ -0,0 +1,192 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpointBaseLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'labels': 'str', + 'members': 'str', + 'owners': 'str' + } + + attribute_map = { + '_self': 'self', + 'labels': 'labels', + 'members': 'members', + 'owners': 'owners' + } + + def __init__(self, _self=None, labels=None, members=None, owners=None): # noqa: E501,D401,D403 + """NotificationEndpointBaseLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._labels = None + self._members = None + self._owners = None + self.discriminator = None + + if _self is not None: + self._self = _self + if labels is not None: + self.labels = labels + if members is not None: + self.members = members + if owners is not None: + self.owners = owners + + @property + def _self(self): + """Get the _self of this NotificationEndpointBaseLinks. + + URI of resource. + + :return: The _self of this NotificationEndpointBaseLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this NotificationEndpointBaseLinks. + + URI of resource. + + :param _self: The _self of this NotificationEndpointBaseLinks. 
+ :type: str + """ # noqa: E501 + self.__self = _self + + @property + def labels(self): + """Get the labels of this NotificationEndpointBaseLinks. + + URI of resource. + + :return: The labels of this NotificationEndpointBaseLinks. + :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this NotificationEndpointBaseLinks. + + URI of resource. + + :param labels: The labels of this NotificationEndpointBaseLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + @property + def members(self): + """Get the members of this NotificationEndpointBaseLinks. + + URI of resource. + + :return: The members of this NotificationEndpointBaseLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this NotificationEndpointBaseLinks. + + URI of resource. + + :param members: The members of this NotificationEndpointBaseLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def owners(self): + """Get the owners of this NotificationEndpointBaseLinks. + + URI of resource. + + :return: The owners of this NotificationEndpointBaseLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this NotificationEndpointBaseLinks. + + URI of resource. + + :param owners: The owners of this NotificationEndpointBaseLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpointBaseLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_discriminator.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_discriminator.py new file mode 100644 index 0000000..b285c12 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_discriminator.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase + + +class NotificationEndpointDiscriminator(NotificationEndpointBase): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'user_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'description': 'str', + 'name': 'str', + 'status': 'str', + 'labels': 'list[Label]', + 'links': 'NotificationEndpointBaseLinks', + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'user_id': 'userID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'description': 'description', + 'name': 'name', + 'status': 'status', + 'labels': 'labels', + 'links': 'links', + 'type': 'type' + } + + def __init__(self, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type=None): # noqa: E501,D401,D403 + """NotificationEndpointDiscriminator - a model defined in OpenAPI.""" # noqa: E501 + NotificationEndpointBase.__init__(self, id=id, org_id=org_id, user_id=user_id, created_at=created_at, updated_at=updated_at, description=description, name=name, status=status, labels=labels, links=links, type=type) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpointDiscriminator): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_type.py new file mode 100644 index 0000000..50ae338 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_type.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpointType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
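+
+    The class simply enumerates the allowed `type` strings; an illustrative
+    check::
+
+        assert NotificationEndpointType.SLACK == "slack"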
+ """ + + """ + allowed enum values + """ + SLACK = "slack" + PAGERDUTY = "pagerduty" + HTTP = "http" + TELEGRAM = "telegram" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """NotificationEndpointType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpointType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_update.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_update.py new file mode 100644 index 0000000..4e63b1f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoint_update.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpointUpdate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'status': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'status': 'status' + } + + def __init__(self, name=None, description=None, status=None): # noqa: E501,D401,D403 + """NotificationEndpointUpdate - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._status = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if status is not None: + self.status = status + + @property + def name(self): + """Get the name of this NotificationEndpointUpdate. + + :return: The name of this NotificationEndpointUpdate. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this NotificationEndpointUpdate. + + :param name: The name of this NotificationEndpointUpdate. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this NotificationEndpointUpdate. + + :return: The description of this NotificationEndpointUpdate. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this NotificationEndpointUpdate. + + :param description: The description of this NotificationEndpointUpdate. + :type: str + """ # noqa: E501 + self._description = description + + @property + def status(self): + """Get the status of this NotificationEndpointUpdate. + + :return: The status of this NotificationEndpointUpdate. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this NotificationEndpointUpdate. + + :param status: The status of this NotificationEndpointUpdate. + :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpointUpdate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoints.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoints.py new file mode 100644 index 0000000..55c2ec9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_endpoints.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationEndpoints(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
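+
+    A minimal illustrative use as a list-response container::
+
+        endpoints = NotificationEndpoints(notification_endpoints=[])
+        endpoints.to_dict()  # -> {'notification_endpoints': [], 'links': None}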
+ """ + openapi_types = { + 'notification_endpoints': 'list[NotificationEndpoint]', + 'links': 'Links' + } + + attribute_map = { + 'notification_endpoints': 'notificationEndpoints', + 'links': 'links' + } + + def __init__(self, notification_endpoints=None, links=None): # noqa: E501,D401,D403 + """NotificationEndpoints - a model defined in OpenAPI.""" # noqa: E501 + self._notification_endpoints = None + self._links = None + self.discriminator = None + + if notification_endpoints is not None: + self.notification_endpoints = notification_endpoints + if links is not None: + self.links = links + + @property + def notification_endpoints(self): + """Get the notification_endpoints of this NotificationEndpoints. + + :return: The notification_endpoints of this NotificationEndpoints. + :rtype: list[NotificationEndpoint] + """ # noqa: E501 + return self._notification_endpoints + + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints): + """Set the notification_endpoints of this NotificationEndpoints. + + :param notification_endpoints: The notification_endpoints of this NotificationEndpoints. + :type: list[NotificationEndpoint] + """ # noqa: E501 + self._notification_endpoints = notification_endpoints + + @property + def links(self): + """Get the links of this NotificationEndpoints. + + :return: The links of this NotificationEndpoints. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this NotificationEndpoints. + + :param links: The links of this NotificationEndpoints. + :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationEndpoints): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule.py new file mode 100644 index 0000000..b22e53b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule.py @@ -0,0 +1,126 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationRule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'slack': 'SlackNotificationRule', + 'pagerduty': 'PagerDutyNotificationRule', + 'smtp': 'SMTPNotificationRule', + 'http': 'HTTPNotificationRule', + 'telegram': 'TelegramNotificationRule' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """NotificationRule - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this NotificationRule. + + The discriminator between other types of notification rules is "telegram". + + :return: The type of this NotificationRule. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this NotificationRule. + + The discriminator between other types of notification rules is "telegram". + + :param type: The type of this NotificationRule. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base.py new file mode 100644 index 0000000..6f3de00 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base.py @@ -0,0 +1,666 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationRuleBase(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """NotificationRuleBase - a model defined in OpenAPI.""" # noqa: E501 + self._latest_completed = None + self._last_run_status = None + self._last_run_error = None + self._id = None + self._endpoint_id = None + self._org_id = None + self._task_id = None + self._owner_id = None + self._created_at = None + self._updated_at = None + self._status = None + self._name = None + self._sleep_until = None + self._every = None + self._offset = None + self._runbook_link = None + self._limit_every = None + self._limit = None + self._tag_rules = None + self._description = None + self._status_rules = None + self._labels = None + self._links = None + self.discriminator = None + + if latest_completed is not None: + self.latest_completed = latest_completed + if last_run_status is not None: + self.last_run_status = last_run_status + if last_run_error is not None: + self.last_run_error = last_run_error + if id is not None: + self.id = id + self.endpoint_id = endpoint_id + self.org_id = org_id + if task_id is not None: + self.task_id = task_id + if owner_id is not None: + self.owner_id = owner_id + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + self.status = status + self.name = name + if sleep_until is not None: + self.sleep_until = sleep_until + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if runbook_link is not None: + self.runbook_link = runbook_link + if limit_every is not None: + self.limit_every = limit_every + if limit is not None: + self.limit = limit + if tag_rules is not None: + self.tag_rules = tag_rules + if description is not None: + 
self.description = description + self.status_rules = status_rules + if labels is not None: + self.labels = labels + if links is not None: + self.links = links + + @property + def latest_completed(self): + """Get the latest_completed of this NotificationRuleBase. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :return: The latest_completed of this NotificationRuleBase. + :rtype: datetime + """ # noqa: E501 + return self._latest_completed + + @latest_completed.setter + def latest_completed(self, latest_completed): + """Set the latest_completed of this NotificationRuleBase. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :param latest_completed: The latest_completed of this NotificationRuleBase. + :type: datetime + """ # noqa: E501 + self._latest_completed = latest_completed + + @property + def last_run_status(self): + """Get the last_run_status of this NotificationRuleBase. + + :return: The last_run_status of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._last_run_status + + @last_run_status.setter + def last_run_status(self, last_run_status): + """Set the last_run_status of this NotificationRuleBase. + + :param last_run_status: The last_run_status of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._last_run_status = last_run_status + + @property + def last_run_error(self): + """Get the last_run_error of this NotificationRuleBase. + + :return: The last_run_error of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._last_run_error + + @last_run_error.setter + def last_run_error(self, last_run_error): + """Set the last_run_error of this NotificationRuleBase. + + :param last_run_error: The last_run_error of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._last_run_error = last_run_error + + @property + def id(self): + """Get the id of this NotificationRuleBase. + + :return: The id of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this NotificationRuleBase. + + :param id: The id of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._id = id + + @property + def endpoint_id(self): + """Get the endpoint_id of this NotificationRuleBase. + + :return: The endpoint_id of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._endpoint_id + + @endpoint_id.setter + def endpoint_id(self, endpoint_id): + """Set the endpoint_id of this NotificationRuleBase. + + :param endpoint_id: The endpoint_id of this NotificationRuleBase. + :type: str + """ # noqa: E501 + if endpoint_id is None: + raise ValueError("Invalid value for `endpoint_id`, must not be `None`") # noqa: E501 + self._endpoint_id = endpoint_id + + @property + def org_id(self): + """Get the org_id of this NotificationRuleBase. + + The ID of the organization that owns this notification rule. + + :return: The org_id of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this NotificationRuleBase. + + The ID of the organization that owns this notification rule. + + :param org_id: The org_id of this NotificationRuleBase. 
+ :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def task_id(self): + """Get the task_id of this NotificationRuleBase. + + The ID of the task associated with this notification rule. + + :return: The task_id of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._task_id + + @task_id.setter + def task_id(self, task_id): + """Set the task_id of this NotificationRuleBase. + + The ID of the task associated with this notification rule. + + :param task_id: The task_id of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._task_id = task_id + + @property + def owner_id(self): + """Get the owner_id of this NotificationRuleBase. + + The ID of creator used to create this notification rule. + + :return: The owner_id of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._owner_id + + @owner_id.setter + def owner_id(self, owner_id): + """Set the owner_id of this NotificationRuleBase. + + The ID of creator used to create this notification rule. + + :param owner_id: The owner_id of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._owner_id = owner_id + + @property + def created_at(self): + """Get the created_at of this NotificationRuleBase. + + :return: The created_at of this NotificationRuleBase. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this NotificationRuleBase. + + :param created_at: The created_at of this NotificationRuleBase. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this NotificationRuleBase. + + :return: The updated_at of this NotificationRuleBase. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this NotificationRuleBase. + + :param updated_at: The updated_at of this NotificationRuleBase. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def status(self): + """Get the status of this NotificationRuleBase. + + :return: The status of this NotificationRuleBase. + :rtype: TaskStatusType + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this NotificationRuleBase. + + :param status: The status of this NotificationRuleBase. + :type: TaskStatusType + """ # noqa: E501 + if status is None: + raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501 + self._status = status + + @property + def name(self): + """Get the name of this NotificationRuleBase. + + Human-readable name describing the notification rule. + + :return: The name of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this NotificationRuleBase. + + Human-readable name describing the notification rule. + + :param name: The name of this NotificationRuleBase. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def sleep_until(self): + """Get the sleep_until of this NotificationRuleBase. + + :return: The sleep_until of this NotificationRuleBase. 
+ :rtype: str + """ # noqa: E501 + return self._sleep_until + + @sleep_until.setter + def sleep_until(self, sleep_until): + """Set the sleep_until of this NotificationRuleBase. + + :param sleep_until: The sleep_until of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._sleep_until = sleep_until + + @property + def every(self): + """Get the every of this NotificationRuleBase. + + The notification repetition interval. + + :return: The every of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this NotificationRuleBase. + + The notification repetition interval. + + :param every: The every of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this NotificationRuleBase. + + Duration to delay after the schedule, before executing check. + + :return: The offset of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this NotificationRuleBase. + + Duration to delay after the schedule, before executing check. + + :param offset: The offset of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def runbook_link(self): + """Get the runbook_link of this NotificationRuleBase. + + :return: The runbook_link of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._runbook_link + + @runbook_link.setter + def runbook_link(self, runbook_link): + """Set the runbook_link of this NotificationRuleBase. + + :param runbook_link: The runbook_link of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._runbook_link = runbook_link + + @property + def limit_every(self): + """Get the limit_every of this NotificationRuleBase. + + Don't notify me more than <limit> times every <limitEvery> seconds. If set, limit cannot be empty. + + :return: The limit_every of this NotificationRuleBase. + :rtype: int + """ # noqa: E501 + return self._limit_every + + @limit_every.setter + def limit_every(self, limit_every): + """Set the limit_every of this NotificationRuleBase. + + Don't notify me more than <limit> times every <limitEvery> seconds. If set, limit cannot be empty. + + :param limit_every: The limit_every of this NotificationRuleBase. + :type: int + """ # noqa: E501 + self._limit_every = limit_every + + @property + def limit(self): + """Get the limit of this NotificationRuleBase. + + Don't notify me more than <limit> times every <limitEvery> seconds. If set, limitEvery cannot be empty. + + :return: The limit of this NotificationRuleBase. + :rtype: int + """ # noqa: E501 + return self._limit + + @limit.setter + def limit(self, limit): + """Set the limit of this NotificationRuleBase. + + Don't notify me more than <limit> times every <limitEvery> seconds. If set, limitEvery cannot be empty. + + :param limit: The limit of this NotificationRuleBase. + :type: int + """ # noqa: E501 + self._limit = limit + + @property + def tag_rules(self): + """Get the tag_rules of this NotificationRuleBase. + + List of tag rules the notification rule attempts to match. + + :return: The tag_rules of this NotificationRuleBase. + :rtype: list[TagRule] + """ # noqa: E501 + return self._tag_rules + + @tag_rules.setter + def tag_rules(self, tag_rules): + """Set the tag_rules of this NotificationRuleBase. + + List of tag rules the notification rule attempts to match. + + :param tag_rules: The tag_rules of this NotificationRuleBase.
+ :type: list[TagRule] + """ # noqa: E501 + self._tag_rules = tag_rules + + @property + def description(self): + """Get the description of this NotificationRuleBase. + + An optional description of the notification rule. + + :return: The description of this NotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this NotificationRuleBase. + + An optional description of the notification rule. + + :param description: The description of this NotificationRuleBase. + :type: str + """ # noqa: E501 + self._description = description + + @property + def status_rules(self): + """Get the status_rules of this NotificationRuleBase. + + List of status rules the notification rule attempts to match. + + :return: The status_rules of this NotificationRuleBase. + :rtype: list[StatusRule] + """ # noqa: E501 + return self._status_rules + + @status_rules.setter + def status_rules(self, status_rules): + """Set the status_rules of this NotificationRuleBase. + + List of status rules the notification rule attempts to match. + + :param status_rules: The status_rules of this NotificationRuleBase. + :type: list[StatusRule] + """ # noqa: E501 + if status_rules is None: + raise ValueError("Invalid value for `status_rules`, must not be `None`") # noqa: E501 + self._status_rules = status_rules + + @property + def labels(self): + """Get the labels of this NotificationRuleBase. + + :return: The labels of this NotificationRuleBase. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this NotificationRuleBase. + + :param labels: The labels of this NotificationRuleBase. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def links(self): + """Get the links of this NotificationRuleBase. + + :return: The links of this NotificationRuleBase. + :rtype: NotificationRuleBaseLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this NotificationRuleBase. + + :param links: The links of this NotificationRuleBase. 
+ :type: NotificationRuleBaseLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base_links.py new file mode 100644 index 0000000..2f6e1c2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_base_links.py @@ -0,0 +1,219 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationRuleBaseLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'labels': 'str', + 'members': 'str', + 'owners': 'str', + 'query': 'str' + } + + attribute_map = { + '_self': 'self', + 'labels': 'labels', + 'members': 'members', + 'owners': 'owners', + 'query': 'query' + } + + def __init__(self, _self=None, labels=None, members=None, owners=None, query=None): # noqa: E501,D401,D403 + """NotificationRuleBaseLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._labels = None + self._members = None + self._owners = None + self._query = None + self.discriminator = None + + if _self is not None: + self._self = _self + if labels is not None: + self.labels = labels + if members is not None: + self.members = members + if owners is not None: + self.owners = owners + if query is not None: + self.query = query + + @property + def _self(self): + """Get the _self of this NotificationRuleBaseLinks. + + URI of resource. + + :return: The _self of this NotificationRuleBaseLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this NotificationRuleBaseLinks. + + URI of resource. + + :param _self: The _self of this NotificationRuleBaseLinks. 
+ :type: str + """ # noqa: E501 + self.__self = _self + + @property + def labels(self): + """Get the labels of this NotificationRuleBaseLinks. + + URI of resource. + + :return: The labels of this NotificationRuleBaseLinks. + :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this NotificationRuleBaseLinks. + + URI of resource. + + :param labels: The labels of this NotificationRuleBaseLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + @property + def members(self): + """Get the members of this NotificationRuleBaseLinks. + + URI of resource. + + :return: The members of this NotificationRuleBaseLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this NotificationRuleBaseLinks. + + URI of resource. + + :param members: The members of this NotificationRuleBaseLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def owners(self): + """Get the owners of this NotificationRuleBaseLinks. + + URI of resource. + + :return: The owners of this NotificationRuleBaseLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this NotificationRuleBaseLinks. + + URI of resource. + + :param owners: The owners of this NotificationRuleBaseLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + @property + def query(self): + """Get the query of this NotificationRuleBaseLinks. + + URI of resource. + + :return: The query of this NotificationRuleBaseLinks. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this NotificationRuleBaseLinks. + + URI of resource. + + :param query: The query of this NotificationRuleBaseLinks. + :type: str + """ # noqa: E501 + self._query = query + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRuleBaseLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_discriminator.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_discriminator.py new file mode 100644 index 0000000..a677fa5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_discriminator.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. 
Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_base import NotificationRuleBase + + +class NotificationRuleDiscriminator(NotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """NotificationRuleDiscriminator - a model defined in OpenAPI.""" # noqa: E501 + NotificationRuleBase.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + 
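# The serialization helpers below are repeated in each model of this
+    # generated client: to_dict() walks `openapi_types` and recurses into
+    # lists, dicts, and nested models that expose their own to_dict();
+    # to_str() and __repr__ pretty-print that dict via pprint.pformat, and
+    # __eq__/__ne__ compare the full instance dictionaries. A minimal,
+    # illustrative sketch (names and values are placeholders, not part of
+    # the generated API):
+    #
+    #     rule = NotificationRuleDiscriminator(name="cpu-alert", every="1h")
+    #     rule.to_dict()   # {'name': 'cpu-alert', 'every': '1h', ...}
+    #     print(rule)      # __repr__ -> to_str() -> pprint.pformat(...)
+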
def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRuleDiscriminator): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_update.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_update.py new file mode 100644 index 0000000..a8e9fa1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rule_update.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationRuleUpdate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'status': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'status': 'status' + } + + def __init__(self, name=None, description=None, status=None): # noqa: E501,D401,D403 + """NotificationRuleUpdate - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._status = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if status is not None: + self.status = status + + @property + def name(self): + """Get the name of this NotificationRuleUpdate. + + :return: The name of this NotificationRuleUpdate. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this NotificationRuleUpdate. + + :param name: The name of this NotificationRuleUpdate. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this NotificationRuleUpdate. + + :return: The description of this NotificationRuleUpdate. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this NotificationRuleUpdate. + + :param description: The description of this NotificationRuleUpdate. + :type: str + """ # noqa: E501 + self._description = description + + @property + def status(self): + """Get the status of this NotificationRuleUpdate. + + :return: The status of this NotificationRuleUpdate. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this NotificationRuleUpdate. + + :param status: The status of this NotificationRuleUpdate. 
+ :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRuleUpdate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rules.py new file mode 100644 index 0000000..08eaf6d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/notification_rules.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class NotificationRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'notification_rules': 'list[NotificationRule]', + 'links': 'Links' + } + + attribute_map = { + 'notification_rules': 'notificationRules', + 'links': 'links' + } + + def __init__(self, notification_rules=None, links=None): # noqa: E501,D401,D403 + """NotificationRules - a model defined in OpenAPI.""" # noqa: E501 + self._notification_rules = None + self._links = None + self.discriminator = None + + if notification_rules is not None: + self.notification_rules = notification_rules + if links is not None: + self.links = links + + @property + def notification_rules(self): + """Get the notification_rules of this NotificationRules. + + :return: The notification_rules of this NotificationRules. + :rtype: list[NotificationRule] + """ # noqa: E501 + return self._notification_rules + + @notification_rules.setter + def notification_rules(self, notification_rules): + """Set the notification_rules of this NotificationRules. + + :param notification_rules: The notification_rules of this NotificationRules. + :type: list[NotificationRule] + """ # noqa: E501 + self._notification_rules = notification_rules + + @property + def links(self): + """Get the links of this NotificationRules. + + :return: The links of this NotificationRules. 
+ :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this NotificationRules. + + :param links: The links of this NotificationRules. + :type: Links + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, NotificationRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/object_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/object_expression.py new file mode 100644 index 0000000..05ca2a0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/object_expression.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class ObjectExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'properties': 'list[ModelProperty]' + } + + attribute_map = { + 'type': 'type', + 'properties': 'properties' + } + + def __init__(self, type=None, properties=None): # noqa: E501,D401,D403 + """ObjectExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._properties = None + self.discriminator = None + + if type is not None: + self.type = type + if properties is not None: + self.properties = properties + + @property + def type(self): + """Get the type of this ObjectExpression. + + Type of AST node + + :return: The type of this ObjectExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ObjectExpression. + + Type of AST node + + :param type: The type of this ObjectExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def properties(self): + """Get the properties of this ObjectExpression. + + Object properties + + :return: The properties of this ObjectExpression. 
+ :rtype: list[ModelProperty] + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this ObjectExpression. + + Object properties + + :param properties: The properties of this ObjectExpression. + :type: list[ModelProperty] + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ObjectExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_request.py new file mode 100644 index 0000000..7ca4bf0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_request.py @@ -0,0 +1,256 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class OnboardingRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'username': 'str', + 'password': 'str', + 'org': 'str', + 'bucket': 'str', + 'retention_period_seconds': 'int', + 'retention_period_hrs': 'int', + 'token': 'str' + } + + attribute_map = { + 'username': 'username', + 'password': 'password', + 'org': 'org', + 'bucket': 'bucket', + 'retention_period_seconds': 'retentionPeriodSeconds', + 'retention_period_hrs': 'retentionPeriodHrs', + 'token': 'token' + } + + def __init__(self, username=None, password=None, org=None, bucket=None, retention_period_seconds=None, retention_period_hrs=None, token=None): # noqa: E501,D401,D403 + """OnboardingRequest - a model defined in OpenAPI.""" # noqa: E501 + self._username = None + self._password = None + self._org = None + self._bucket = None + self._retention_period_seconds = None + self._retention_period_hrs = None + self._token = None + self.discriminator = None + + self.username = username + if password is not None: + self.password = password + self.org = org + self.bucket = bucket + if retention_period_seconds is not None: + self.retention_period_seconds = retention_period_seconds + if retention_period_hrs is not None: + self.retention_period_hrs = retention_period_hrs + if token is not None: + self.token = token + + @property + def username(self): + """Get the username of this OnboardingRequest. + + :return: The username of this OnboardingRequest. + :rtype: str + """ # noqa: E501 + return self._username + + @username.setter + def username(self, username): + """Set the username of this OnboardingRequest. + + :param username: The username of this OnboardingRequest. + :type: str + """ # noqa: E501 + if username is None: + raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501 + self._username = username + + @property + def password(self): + """Get the password of this OnboardingRequest. + + :return: The password of this OnboardingRequest. + :rtype: str + """ # noqa: E501 + return self._password + + @password.setter + def password(self, password): + """Set the password of this OnboardingRequest. + + :param password: The password of this OnboardingRequest. + :type: str + """ # noqa: E501 + self._password = password + + @property + def org(self): + """Get the org of this OnboardingRequest. + + :return: The org of this OnboardingRequest. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this OnboardingRequest. + + :param org: The org of this OnboardingRequest. + :type: str + """ # noqa: E501 + if org is None: + raise ValueError("Invalid value for `org`, must not be `None`") # noqa: E501 + self._org = org + + @property + def bucket(self): + """Get the bucket of this OnboardingRequest. + + :return: The bucket of this OnboardingRequest. + :rtype: str + """ # noqa: E501 + return self._bucket + + @bucket.setter + def bucket(self, bucket): + """Set the bucket of this OnboardingRequest. + + :param bucket: The bucket of this OnboardingRequest. + :type: str + """ # noqa: E501 + if bucket is None: + raise ValueError("Invalid value for `bucket`, must not be `None`") # noqa: E501 + self._bucket = bucket + + @property + def retention_period_seconds(self): + """Get the retention_period_seconds of this OnboardingRequest. + + :return: The retention_period_seconds of this OnboardingRequest. 
+ :rtype: int + """ # noqa: E501 + return self._retention_period_seconds + + @retention_period_seconds.setter + def retention_period_seconds(self, retention_period_seconds): + """Set the retention_period_seconds of this OnboardingRequest. + + :param retention_period_seconds: The retention_period_seconds of this OnboardingRequest. + :type: int + """ # noqa: E501 + self._retention_period_seconds = retention_period_seconds + + @property + def retention_period_hrs(self): + """Get the retention_period_hrs of this OnboardingRequest. + + Retention period *in nanoseconds* for the new bucket. This key's name has been misleading since OSS 2.0 GA, please transition to use `retentionPeriodSeconds` + + :return: The retention_period_hrs of this OnboardingRequest. + :rtype: int + """ # noqa: E501 + return self._retention_period_hrs + + @retention_period_hrs.setter + def retention_period_hrs(self, retention_period_hrs): + """Set the retention_period_hrs of this OnboardingRequest. + + Retention period *in nanoseconds* for the new bucket. This key's name has been misleading since OSS 2.0 GA, please transition to use `retentionPeriodSeconds` + + :param retention_period_hrs: The retention_period_hrs of this OnboardingRequest. + :type: int + """ # noqa: E501 + self._retention_period_hrs = retention_period_hrs + + @property + def token(self): + """Get the token of this OnboardingRequest. + + Authentication token to set on the initial user. If not specified, the server will generate a token. + + :return: The token of this OnboardingRequest. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this OnboardingRequest. + + Authentication token to set on the initial user. If not specified, the server will generate a token. + + :param token: The token of this OnboardingRequest. + :type: str + """ # noqa: E501 + self._token = token + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, OnboardingRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_response.py new file mode 100644 index 0000000..62db52f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/onboarding_response.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class OnboardingResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'user': 'UserResponse', + 'org': 'Organization', + 'bucket': 'Bucket', + 'auth': 'Authorization' + } + + attribute_map = { + 'user': 'user', + 'org': 'org', + 'bucket': 'bucket', + 'auth': 'auth' + } + + def __init__(self, user=None, org=None, bucket=None, auth=None): # noqa: E501,D401,D403 + """OnboardingResponse - a model defined in OpenAPI.""" # noqa: E501 + self._user = None + self._org = None + self._bucket = None + self._auth = None + self.discriminator = None + + if user is not None: + self.user = user + if org is not None: + self.org = org + if bucket is not None: + self.bucket = bucket + if auth is not None: + self.auth = auth + + @property + def user(self): + """Get the user of this OnboardingResponse. + + :return: The user of this OnboardingResponse. + :rtype: UserResponse + """ # noqa: E501 + return self._user + + @user.setter + def user(self, user): + """Set the user of this OnboardingResponse. + + :param user: The user of this OnboardingResponse. + :type: UserResponse + """ # noqa: E501 + self._user = user + + @property + def org(self): + """Get the org of this OnboardingResponse. + + :return: The org of this OnboardingResponse. + :rtype: Organization + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this OnboardingResponse. + + :param org: The org of this OnboardingResponse. + :type: Organization + """ # noqa: E501 + self._org = org + + @property + def bucket(self): + """Get the bucket of this OnboardingResponse. + + :return: The bucket of this OnboardingResponse. + :rtype: Bucket + """ # noqa: E501 + return self._bucket + + @bucket.setter + def bucket(self, bucket): + """Set the bucket of this OnboardingResponse. + + :param bucket: The bucket of this OnboardingResponse. + :type: Bucket + """ # noqa: E501 + self._bucket = bucket + + @property + def auth(self): + """Get the auth of this OnboardingResponse. + + :return: The auth of this OnboardingResponse. + :rtype: Authorization + """ # noqa: E501 + return self._auth + + @auth.setter + def auth(self, auth): + """Set the auth of this OnboardingResponse. + + :param auth: The auth of this OnboardingResponse. 
+ :type: Authorization + """ # noqa: E501 + self._auth = auth + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, OnboardingResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/option_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/option_statement.py new file mode 100644 index 0000000..4ab1569 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/option_statement.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class OptionStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'assignment': 'object' + } + + attribute_map = { + 'type': 'type', + 'assignment': 'assignment' + } + + def __init__(self, type=None, assignment=None): # noqa: E501,D401,D403 + """OptionStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._assignment = None + self.discriminator = None + + if type is not None: + self.type = type + if assignment is not None: + self.assignment = assignment + + @property + def type(self): + """Get the type of this OptionStatement. + + Type of AST node + + :return: The type of this OptionStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this OptionStatement. + + Type of AST node + + :param type: The type of this OptionStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def assignment(self): + """Get the assignment of this OptionStatement. + + :return: The assignment of this OptionStatement. + :rtype: object + """ # noqa: E501 + return self._assignment + + @assignment.setter + def assignment(self, assignment): + """Set the assignment of this OptionStatement. + + :param assignment: The assignment of this OptionStatement. 
+ :type: object + """ # noqa: E501 + self._assignment = assignment + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, OptionStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization.py new file mode 100644 index 0000000..4071964 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Organization(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'links': 'OrganizationLinks', + 'id': 'str', + 'name': 'str', + 'default_storage_type': 'str', + 'description': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'str' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'name': 'name', + 'default_storage_type': 'defaultStorageType', + 'description': 'description', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status' + } + + def __init__(self, links=None, id=None, name=None, default_storage_type=None, description=None, created_at=None, updated_at=None, status='active'): # noqa: E501,D401,D403 + """Organization - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._id = None + self._name = None + self._default_storage_type = None + self._description = None + self._created_at = None + self._updated_at = None + self._status = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + self.name = name + if default_storage_type is not None: + self.default_storage_type = default_storage_type + if description is not None: + self.description = description + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + if status is not None: + self.status = status + + @property + def links(self): + """Get the links of this Organization. + + :return: The links of this Organization. + :rtype: OrganizationLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Organization. + + :param links: The links of this Organization. + :type: OrganizationLinks + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this Organization. + + :return: The id of this Organization. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Organization. + + :param id: The id of this Organization. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this Organization. + + :return: The name of this Organization. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Organization. + + :param name: The name of this Organization. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def default_storage_type(self): + """Get the default_storage_type of this Organization. + + Discloses whether the organization uses TSM or IOx. + + :return: The default_storage_type of this Organization. + :rtype: str + """ # noqa: E501 + return self._default_storage_type + + @default_storage_type.setter + def default_storage_type(self, default_storage_type): + """Set the default_storage_type of this Organization. + + Discloses whether the organization uses TSM or IOx. + + :param default_storage_type: The default_storage_type of this Organization. + :type: str + """ # noqa: E501 + self._default_storage_type = default_storage_type + + @property + def description(self): + """Get the description of this Organization. + + :return: The description of this Organization. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Organization. + + :param description: The description of this Organization. 
+ :type: str + """ # noqa: E501 + self._description = description + + @property + def created_at(self): + """Get the created_at of this Organization. + + :return: The created_at of this Organization. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Organization. + + :param created_at: The created_at of this Organization. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Organization. + + :return: The updated_at of this Organization. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Organization. + + :param updated_at: The updated_at of this Organization. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def status(self): + """Get the status of this Organization. + + If inactive, the organization is inactive. + + :return: The status of this Organization. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this Organization. + + If inactive, the organization is inactive. + + :param status: The status of this Organization. + :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Organization): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization_links.py new file mode 100644 index 0000000..a2cca04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organization_links.py @@ -0,0 +1,300 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class OrganizationLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + '_self': 'str', + 'members': 'str', + 'owners': 'str', + 'labels': 'str', + 'secrets': 'str', + 'buckets': 'str', + 'tasks': 'str', + 'dashboards': 'str' + } + + attribute_map = { + '_self': 'self', + 'members': 'members', + 'owners': 'owners', + 'labels': 'labels', + 'secrets': 'secrets', + 'buckets': 'buckets', + 'tasks': 'tasks', + 'dashboards': 'dashboards' + } + + def __init__(self, _self=None, members=None, owners=None, labels=None, secrets=None, buckets=None, tasks=None, dashboards=None): # noqa: E501,D401,D403 + """OrganizationLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._members = None + self._owners = None + self._labels = None + self._secrets = None + self._buckets = None + self._tasks = None + self._dashboards = None + self.discriminator = None + + if _self is not None: + self._self = _self + if members is not None: + self.members = members + if owners is not None: + self.owners = owners + if labels is not None: + self.labels = labels + if secrets is not None: + self.secrets = secrets + if buckets is not None: + self.buckets = buckets + if tasks is not None: + self.tasks = tasks + if dashboards is not None: + self.dashboards = dashboards + + @property + def _self(self): + """Get the _self of this OrganizationLinks. + + URI of resource. + + :return: The _self of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this OrganizationLinks. + + URI of resource. + + :param _self: The _self of this OrganizationLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def members(self): + """Get the members of this OrganizationLinks. + + URI of resource. + + :return: The members of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this OrganizationLinks. + + URI of resource. + + :param members: The members of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def owners(self): + """Get the owners of this OrganizationLinks. + + URI of resource. + + :return: The owners of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this OrganizationLinks. + + URI of resource. + + :param owners: The owners of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + @property + def labels(self): + """Get the labels of this OrganizationLinks. + + URI of resource. + + :return: The labels of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this OrganizationLinks. + + URI of resource. + + :param labels: The labels of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + @property + def secrets(self): + """Get the secrets of this OrganizationLinks. + + URI of resource. + + :return: The secrets of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._secrets + + @secrets.setter + def secrets(self, secrets): + """Set the secrets of this OrganizationLinks. + + URI of resource. + + :param secrets: The secrets of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._secrets = secrets + + @property + def buckets(self): + """Get the buckets of this OrganizationLinks. + + URI of resource. 
+ + :return: The buckets of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this OrganizationLinks. + + URI of resource. + + :param buckets: The buckets of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._buckets = buckets + + @property + def tasks(self): + """Get the tasks of this OrganizationLinks. + + URI of resource. + + :return: The tasks of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Set the tasks of this OrganizationLinks. + + URI of resource. + + :param tasks: The tasks of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._tasks = tasks + + @property + def dashboards(self): + """Get the dashboards of this OrganizationLinks. + + URI of resource. + + :return: The dashboards of this OrganizationLinks. + :rtype: str + """ # noqa: E501 + return self._dashboards + + @dashboards.setter + def dashboards(self, dashboards): + """Set the dashboards of this OrganizationLinks. + + URI of resource. + + :param dashboards: The dashboards of this OrganizationLinks. + :type: str + """ # noqa: E501 + self._dashboards = dashboards + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, OrganizationLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/organizations.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organizations.py new file mode 100644 index 0000000..844c7fa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/organizations.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Organizations(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'links': 'Links', + 'orgs': 'list[Organization]' + } + + attribute_map = { + 'links': 'links', + 'orgs': 'orgs' + } + + def __init__(self, links=None, orgs=None): # noqa: E501,D401,D403 + """Organizations - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._orgs = None + self.discriminator = None + + if links is not None: + self.links = links + if orgs is not None: + self.orgs = orgs + + @property + def links(self): + """Get the links of this Organizations. + + :return: The links of this Organizations. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Organizations. + + :param links: The links of this Organizations. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def orgs(self): + """Get the orgs of this Organizations. + + :return: The orgs of this Organizations. + :rtype: list[Organization] + """ # noqa: E501 + return self._orgs + + @orgs.setter + def orgs(self, orgs): + """Set the orgs of this Organizations. + + :param orgs: The orgs of this Organizations. + :type: list[Organization] + """ # noqa: E501 + self._orgs = orgs + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Organizations): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/package.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/package.py new file mode 100644 index 0000000..9c328e6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/package.py @@ -0,0 +1,192 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Package(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'path': 'str', + 'package': 'str', + 'files': 'list[File]' + } + + attribute_map = { + 'type': 'type', + 'path': 'path', + 'package': 'package', + 'files': 'files' + } + + def __init__(self, type=None, path=None, package=None, files=None): # noqa: E501,D401,D403 + """Package - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._path = None + self._package = None + self._files = None + self.discriminator = None + + if type is not None: + self.type = type + if path is not None: + self.path = path + if package is not None: + self.package = package + if files is not None: + self.files = files + + @property + def type(self): + """Get the type of this Package. + + Type of AST node + + :return: The type of this Package. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Package. + + Type of AST node + + :param type: The type of this Package. + :type: str + """ # noqa: E501 + self._type = type + + @property + def path(self): + """Get the path of this Package. + + Package import path + + :return: The path of this Package. + :rtype: str + """ # noqa: E501 + return self._path + + @path.setter + def path(self, path): + """Set the path of this Package. + + Package import path + + :param path: The path of this Package. + :type: str + """ # noqa: E501 + self._path = path + + @property + def package(self): + """Get the package of this Package. + + Package name + + :return: The package of this Package. + :rtype: str + """ # noqa: E501 + return self._package + + @package.setter + def package(self, package): + """Set the package of this Package. + + Package name + + :param package: The package of this Package. + :type: str + """ # noqa: E501 + self._package = package + + @property + def files(self): + """Get the files of this Package. + + Package files + + :return: The files of this Package. + :rtype: list[File] + """ # noqa: E501 + return self._files + + @files.setter + def files(self, files): + """Set the files of this Package. + + Package files + + :param files: The files of this Package. 
+ :type: list[File] + """ # noqa: E501 + self._files = files + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Package): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/package_clause.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/package_clause.py new file mode 100644 index 0000000..b9cb6ab --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/package_clause.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PackageClause(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'name': 'Identifier' + } + + attribute_map = { + 'type': 'type', + 'name': 'name' + } + + def __init__(self, type=None, name=None): # noqa: E501,D401,D403 + """PackageClause - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._name = None + self.discriminator = None + + if type is not None: + self.type = type + if name is not None: + self.name = name + + @property + def type(self): + """Get the type of this PackageClause. + + Type of AST node + + :return: The type of this PackageClause. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PackageClause. + + Type of AST node + + :param type: The type of this PackageClause. + :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this PackageClause. + + :return: The name of this PackageClause. + :rtype: Identifier + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PackageClause. + + :param name: The name of this PackageClause. 
+ :type: Identifier + """ # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PackageClause): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_endpoint.py new file mode 100644 index 0000000..f57b6c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_endpoint.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + + +class PagerDutyNotificationEndpoint(NotificationEndpointDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'client_url': 'str', + 'routing_key': 'str', + 'id': 'str', + 'org_id': 'str', + 'user_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'description': 'str', + 'name': 'str', + 'status': 'str', + 'labels': 'list[Label]', + 'links': 'NotificationEndpointBaseLinks', + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'client_url': 'clientURL', + 'routing_key': 'routingKey', + 'id': 'id', + 'org_id': 'orgID', + 'user_id': 'userID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'description': 'description', + 'name': 'name', + 'status': 'status', + 'labels': 'labels', + 'links': 'links', + 'type': 'type' + } + + def __init__(self, client_url=None, routing_key=None, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type="pagerduty"): # noqa: E501,D401,D403 + """PagerDutyNotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501 + NotificationEndpointDiscriminator.__init__(self, id=id, org_id=org_id, user_id=user_id, created_at=created_at, updated_at=updated_at, description=description, name=name, status=status, labels=labels, links=links, type=type) # noqa: E501 + + self._client_url = None + self._routing_key = None + self.discriminator = None + + if client_url is not None: + self.client_url = client_url + self.routing_key = routing_key + + @property + def client_url(self): + """Get the client_url of this PagerDutyNotificationEndpoint. + + :return: The client_url of this PagerDutyNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._client_url + + @client_url.setter + def client_url(self, client_url): + """Set the client_url of this PagerDutyNotificationEndpoint. + + :param client_url: The client_url of this PagerDutyNotificationEndpoint. + :type: str + """ # noqa: E501 + self._client_url = client_url + + @property + def routing_key(self): + """Get the routing_key of this PagerDutyNotificationEndpoint. + + :return: The routing_key of this PagerDutyNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._routing_key + + @routing_key.setter + def routing_key(self, routing_key): + """Set the routing_key of this PagerDutyNotificationEndpoint. + + :param routing_key: The routing_key of this PagerDutyNotificationEndpoint. 
+ :type: str + """ # noqa: E501 + if routing_key is None: + raise ValueError("Invalid value for `routing_key`, must not be `None`") # noqa: E501 + self._routing_key = routing_key + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PagerDutyNotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule.py new file mode 100644 index 0000000..8ccf2bd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.pager_duty_notification_rule_base import PagerDutyNotificationRuleBase + + +class PagerDutyNotificationRule(PagerDutyNotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
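# --- Illustrative usage sketch (not part of the generated file above): building
# the PagerDutyNotificationEndpoint model. `routing_key` is the one
# endpoint-specific required field -- its setter raises ValueError on None --
# while `client_url` is optional. Assumes the unseen
# NotificationEndpointDiscriminator base accepts the inherited fields shown here.
from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint

endpoint = PagerDutyNotificationEndpoint(
    name="on-call",                                        # inherited field, hypothetical value
    routing_key="my-routing-key",                          # required; None raises ValueError
    client_url="https://events.pagerduty.com/v2/enqueue",  # optional
)
print(endpoint.to_dict())  # note: keys are python attribute names, not the attribute_map JSON keys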
+ """ + openapi_types = { + 'type': 'str', + 'message_template': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'message_template': 'messageTemplate', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="pagerduty", message_template=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """PagerDutyNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + PagerDutyNotificationRuleBase.__init__(self, type=type, message_template=message_template, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PagerDutyNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == 
other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule_base.py new file mode 100644 index 0000000..8e44774 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pager_duty_notification_rule_base.py @@ -0,0 +1,182 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + + +class PagerDutyNotificationRuleBase(NotificationRuleDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'message_template': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'message_template': 'messageTemplate', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type=None, message_template=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """PagerDutyNotificationRuleBase - a model defined in OpenAPI.""" # noqa: E501 + NotificationRuleDiscriminator.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, 
tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + + self._type = None + self._message_template = None + self.discriminator = None + + self.type = type + self.message_template = message_template + + @property + def type(self): + """Get the type of this PagerDutyNotificationRuleBase. + + :return: The type of this PagerDutyNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PagerDutyNotificationRuleBase. + + :param type: The type of this PagerDutyNotificationRuleBase. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def message_template(self): + """Get the message_template of this PagerDutyNotificationRuleBase. + + :return: The message_template of this PagerDutyNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._message_template + + @message_template.setter + def message_template(self, message_template): + """Set the message_template of this PagerDutyNotificationRuleBase. + + :param message_template: The message_template of this PagerDutyNotificationRuleBase. + :type: str + """ # noqa: E501 + if message_template is None: + raise ValueError("Invalid value for `message_template`, must not be `None`") # noqa: E501 + self._message_template = message_template + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PagerDutyNotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/paren_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/paren_expression.py new file mode 100644 index 0000000..68bcfe0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/paren_expression.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class ParenExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'expression': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'expression': 'expression' + } + + def __init__(self, type=None, expression=None): # noqa: E501,D401,D403 + """ParenExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._expression = None + self.discriminator = None + + if type is not None: + self.type = type + if expression is not None: + self.expression = expression + + @property + def type(self): + """Get the type of this ParenExpression. + + Type of AST node + + :return: The type of this ParenExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ParenExpression. + + Type of AST node + + :param type: The type of this ParenExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def expression(self): + """Get the expression of this ParenExpression. + + :return: The expression of this ParenExpression. + :rtype: Expression + """ # noqa: E501 + return self._expression + + @expression.setter + def expression(self, expression): + """Set the expression of this ParenExpression. + + :param expression: The expression of this ParenExpression. + :type: Expression + """ # noqa: E501 + self._expression = expression + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ParenExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/password_reset_body.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/password_reset_body.py new file mode 100644 index 0000000..22dc5c9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/password_reset_body.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PasswordResetBody(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
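# --- Illustrative sketch: ParenExpression (defined above) is a thin AST
# wrapper -- both fields are optional at the model level, and `expression`
# would hold any other generated Expression node.
from influxdb_client.domain.paren_expression import ParenExpression

node = ParenExpression(type="ParenExpression")
print(node.to_dict())  # {'type': 'ParenExpression', 'expression': None}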
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'password': 'str' + } + + attribute_map = { + 'password': 'password' + } + + def __init__(self, password=None): # noqa: E501,D401,D403 + """PasswordResetBody - a model defined in OpenAPI.""" # noqa: E501 + self._password = None + self.discriminator = None + + self.password = password + + @property + def password(self): + """Get the password of this PasswordResetBody. + + :return: The password of this PasswordResetBody. + :rtype: str + """ # noqa: E501 + return self._password + + @password.setter + def password(self, password): + """Set the password of this PasswordResetBody. + + :param password: The password of this PasswordResetBody. + :type: str + """ # noqa: E501 + if password is None: + raise ValueError("Invalid value for `password`, must not be `None`") # noqa: E501 + self._password = password + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PasswordResetBody): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_bucket_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_bucket_request.py new file mode 100644 index 0000000..d2416a5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_bucket_request.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchBucketRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
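# --- Illustrative sketch: PasswordResetBody has a single required field, and
# the setter above rejects None, so an empty reset body cannot be constructed.
from influxdb_client.domain.password_reset_body import PasswordResetBody

body = PasswordResetBody(password="s3cret-example")  # hypothetical value
assert body.to_dict() == {"password": "s3cret-example"}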
+ """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'retention_rules': 'list[PatchRetentionRule]' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'retention_rules': 'retentionRules' + } + + def __init__(self, name=None, description=None, retention_rules=None): # noqa: E501,D401,D403 + """PatchBucketRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._retention_rules = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if retention_rules is not None: + self.retention_rules = retention_rules + + @property + def name(self): + """Get the name of this PatchBucketRequest. + + The name of the bucket. + + :return: The name of this PatchBucketRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PatchBucketRequest. + + The name of the bucket. + + :param name: The name of this PatchBucketRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PatchBucketRequest. + + A description of the bucket. + + :return: The description of this PatchBucketRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PatchBucketRequest. + + A description of the bucket. + + :param description: The description of this PatchBucketRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def retention_rules(self): + """Get the retention_rules of this PatchBucketRequest. + + Updates to rules to expire or retain data. No rules means no updates. + + :return: The retention_rules of this PatchBucketRequest. + :rtype: list[PatchRetentionRule] + """ # noqa: E501 + return self._retention_rules + + @retention_rules.setter + def retention_rules(self, retention_rules): + """Set the retention_rules of this PatchBucketRequest. + + Updates to rules to expire or retain data. No rules means no updates. + + :param retention_rules: The retention_rules of this PatchBucketRequest. 
+ :type: list[PatchRetentionRule] + """ # noqa: E501 + self._retention_rules = retention_rules + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchBucketRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_dashboard_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_dashboard_request.py new file mode 100644 index 0000000..8405c3b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_dashboard_request.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchDashboardRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'cells': 'CellWithViewProperties' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'cells': 'cells' + } + + def __init__(self, name=None, description=None, cells=None): # noqa: E501,D401,D403 + """PatchDashboardRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._cells = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if cells is not None: + self.cells = cells + + @property + def name(self): + """Get the name of this PatchDashboardRequest. + + optional, when provided will replace the name + + :return: The name of this PatchDashboardRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PatchDashboardRequest. + + optional, when provided will replace the name + + :param name: The name of this PatchDashboardRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PatchDashboardRequest. + + optional, when provided will replace the description + + :return: The description of this PatchDashboardRequest. 
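# --- Illustrative sketch: every PatchBucketRequest field is optional, matching
# PATCH semantics -- omitted fields leave the bucket unchanged. Nested models
# such as PatchRetentionRule are expanded through their own to_dict().
from influxdb_client.domain.patch_bucket_request import PatchBucketRequest
from influxdb_client.domain.patch_retention_rule import PatchRetentionRule

patch = PatchBucketRequest(
    description="primary metrics bucket",                       # hypothetical text
    retention_rules=[PatchRetentionRule(every_seconds=86400)],  # keep data for one day
)
print(patch.to_dict())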
+ :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PatchDashboardRequest. + + optional, when provided will replace the description + + :param description: The description of this PatchDashboardRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def cells(self): + """Get the cells of this PatchDashboardRequest. + + :return: The cells of this PatchDashboardRequest. + :rtype: CellWithViewProperties + """ # noqa: E501 + return self._cells + + @cells.setter + def cells(self, cells): + """Set the cells of this PatchDashboardRequest. + + :param cells: The cells of this PatchDashboardRequest. + :type: CellWithViewProperties + """ # noqa: E501 + self._cells = cells + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchDashboardRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_organization_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_organization_request.py new file mode 100644 index 0000000..ff74e17 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_organization_request.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchOrganizationRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description' + } + + def __init__(self, name=None, description=None): # noqa: E501,D401,D403 + """PatchOrganizationRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + + @property + def name(self): + """Get the name of this PatchOrganizationRequest. + + The name of the organization. 
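# --- Illustrative sketch: note that `cells` above is typed as a single
# CellWithViewProperties value rather than a list, mirroring the spec; only the
# fields you set carry meaning in the PATCH payload.
from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest

patch = PatchDashboardRequest(name="renamed dashboard")  # hypothetical name
print(patch.to_dict())  # {'name': 'renamed dashboard', 'description': None, 'cells': None}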
+ + :return: The name of this PatchOrganizationRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PatchOrganizationRequest. + + The name of the organization. + + :param name: The name of this PatchOrganizationRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PatchOrganizationRequest. + + The description of the organization. + + :return: The description of this PatchOrganizationRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PatchOrganizationRequest. + + The description of the organization. + + :param description: The description of this PatchOrganizationRequest. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchOrganizationRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_retention_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_retention_rule.py new file mode 100644 index 0000000..d02335a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_retention_rule.py @@ -0,0 +1,164 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchRetentionRule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'every_seconds': 'int', + 'shard_group_duration_seconds': 'int' + } + + attribute_map = { + 'type': 'type', + 'every_seconds': 'everySeconds', + 'shard_group_duration_seconds': 'shardGroupDurationSeconds' + } + + def __init__(self, type='expire', every_seconds=2592000, shard_group_duration_seconds=None): # noqa: E501,D401,D403 + """PatchRetentionRule - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._every_seconds = None + self._shard_group_duration_seconds = None + self.discriminator = None + + if type is not None: + self.type = type + self.every_seconds = every_seconds + if shard_group_duration_seconds is not None: + self.shard_group_duration_seconds = shard_group_duration_seconds + + @property + def type(self): + """Get the type of this PatchRetentionRule. + + :return: The type of this PatchRetentionRule. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PatchRetentionRule. + + :param type: The type of this PatchRetentionRule. + :type: str + """ # noqa: E501 + self._type = type + + @property + def every_seconds(self): + """Get the every_seconds of this PatchRetentionRule. + + The number of seconds to keep data. Default duration is `2592000` (30 days). `0` represents infinite retention. + + :return: The every_seconds of this PatchRetentionRule. + :rtype: int + """ # noqa: E501 + return self._every_seconds + + @every_seconds.setter + def every_seconds(self, every_seconds): + """Set the every_seconds of this PatchRetentionRule. + + The number of seconds to keep data. Default duration is `2592000` (30 days). `0` represents infinite retention. + + :param every_seconds: The every_seconds of this PatchRetentionRule. + :type: int + """ # noqa: E501 + if every_seconds is None: + raise ValueError("Invalid value for `every_seconds`, must not be `None`") # noqa: E501 + if every_seconds is not None and every_seconds < 0: # noqa: E501 + raise ValueError("Invalid value for `every_seconds`, must be a value greater than or equal to `0`") # noqa: E501 + self._every_seconds = every_seconds + + @property + def shard_group_duration_seconds(self): + """Get the shard_group_duration_seconds of this PatchRetentionRule. + + The [shard group duration](https://docs.influxdata.com/influxdb/latest/reference/glossary/#shard). The number of seconds that each shard group covers. #### InfluxDB Cloud - Doesn't use `shardGroupDurationsSeconds`. #### InfluxDB OSS - Default value depends on the [bucket retention period](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/#shard-group-duration). #### Related guides - InfluxDB [shards and shard groups](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/) + + :return: The shard_group_duration_seconds of this PatchRetentionRule. + :rtype: int + """ # noqa: E501 + return self._shard_group_duration_seconds + + @shard_group_duration_seconds.setter + def shard_group_duration_seconds(self, shard_group_duration_seconds): + """Set the shard_group_duration_seconds of this PatchRetentionRule. + + The [shard group duration](https://docs.influxdata.com/influxdb/latest/reference/glossary/#shard). The number of seconds that each shard group covers. #### InfluxDB Cloud - Doesn't use `shardGroupDurationsSeconds`. #### InfluxDB OSS - Default value depends on the [bucket retention period](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/#shard-group-duration). 
#### Related guides - InfluxDB [shards and shard groups](https://docs.influxdata.com/influxdb/latest/reference/internals/shards/) + + :param shard_group_duration_seconds: The shard_group_duration_seconds of this PatchRetentionRule. + :type: int + """ # noqa: E501 + self._shard_group_duration_seconds = shard_group_duration_seconds + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchRetentionRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request.py new file mode 100644 index 0000000..917cc31 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request.py @@ -0,0 +1,173 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchStackRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'template_ur_ls': 'list[str]', + 'additional_resources': 'list[PatchStackRequestAdditionalResources]' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'template_ur_ls': 'templateURLs', + 'additional_resources': 'additionalResources' + } + + def __init__(self, name=None, description=None, template_ur_ls=None, additional_resources=None): # noqa: E501,D401,D403 + """PatchStackRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._template_ur_ls = None + self._additional_resources = None + self.discriminator = None + + self.name = name + self.description = description + self.template_ur_ls = template_ur_ls + if additional_resources is not None: + self.additional_resources = additional_resources + + @property + def name(self): + """Get the name of this PatchStackRequest. + + :return: The name of this PatchStackRequest. 
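# --- Illustrative sketch of the every_seconds guard above: the value must be a
# non-negative integer, and 0 means keep data forever.
from influxdb_client.domain.patch_retention_rule import PatchRetentionRule

rule = PatchRetentionRule(every_seconds=0)  # 0 == infinite retention
try:
    rule.every_seconds = -1                 # rejected by the setter
except ValueError as err:
    print(err)  # Invalid value for `every_seconds`, must be a value greater than or equal to `0`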
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PatchStackRequest. + + :param name: The name of this PatchStackRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PatchStackRequest. + + :return: The description of this PatchStackRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PatchStackRequest. + + :param description: The description of this PatchStackRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def template_ur_ls(self): + """Get the template_ur_ls of this PatchStackRequest. + + :return: The template_ur_ls of this PatchStackRequest. + :rtype: list[str] + """ # noqa: E501 + return self._template_ur_ls + + @template_ur_ls.setter + def template_ur_ls(self, template_ur_ls): + """Set the template_ur_ls of this PatchStackRequest. + + :param template_ur_ls: The template_ur_ls of this PatchStackRequest. + :type: list[str] + """ # noqa: E501 + self._template_ur_ls = template_ur_ls + + @property + def additional_resources(self): + """Get the additional_resources of this PatchStackRequest. + + :return: The additional_resources of this PatchStackRequest. + :rtype: list[PatchStackRequestAdditionalResources] + """ # noqa: E501 + return self._additional_resources + + @additional_resources.setter + def additional_resources(self, additional_resources): + """Set the additional_resources of this PatchStackRequest. + + :param additional_resources: The additional_resources of this PatchStackRequest. + :type: list[PatchStackRequestAdditionalResources] + """ # noqa: E501 + self._additional_resources = additional_resources + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchStackRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request_additional_resources.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request_additional_resources.py new file mode 100644 index 0000000..6df857b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/patch_stack_request_additional_resources.py @@ -0,0 +1,155 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PatchStackRequestAdditionalResources(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'resource_id': 'str', + 'kind': 'str', + 'template_meta_name': 'str' + } + + attribute_map = { + 'resource_id': 'resourceID', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName' + } + + def __init__(self, resource_id=None, kind=None, template_meta_name=None): # noqa: E501,D401,D403 + """PatchStackRequestAdditionalResources - a model defined in OpenAPI.""" # noqa: E501 + self._resource_id = None + self._kind = None + self._template_meta_name = None + self.discriminator = None + + self.resource_id = resource_id + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + + @property + def resource_id(self): + """Get the resource_id of this PatchStackRequestAdditionalResources. + + :return: The resource_id of this PatchStackRequestAdditionalResources. + :rtype: str + """ # noqa: E501 + return self._resource_id + + @resource_id.setter + def resource_id(self, resource_id): + """Set the resource_id of this PatchStackRequestAdditionalResources. + + :param resource_id: The resource_id of this PatchStackRequestAdditionalResources. + :type: str + """ # noqa: E501 + if resource_id is None: + raise ValueError("Invalid value for `resource_id`, must not be `None`") # noqa: E501 + self._resource_id = resource_id + + @property + def kind(self): + """Get the kind of this PatchStackRequestAdditionalResources. + + :return: The kind of this PatchStackRequestAdditionalResources. + :rtype: str + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this PatchStackRequestAdditionalResources. + + :param kind: The kind of this PatchStackRequestAdditionalResources. + :type: str + """ # noqa: E501 + if kind is None: + raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this PatchStackRequestAdditionalResources. + + :return: The template_meta_name of this PatchStackRequestAdditionalResources. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this PatchStackRequestAdditionalResources. + + :param template_meta_name: The template_meta_name of this PatchStackRequestAdditionalResources. 
+ :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PatchStackRequestAdditionalResources): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission.py new file mode 100644 index 0000000..4d25db7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Permission(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'action': 'str', + 'resource': 'PermissionResource' + } + + attribute_map = { + 'action': 'action', + 'resource': 'resource' + } + + def __init__(self, action=None, resource=None): # noqa: E501,D401,D403 + """Permission - a model defined in OpenAPI.""" # noqa: E501 + self._action = None + self._resource = None + self.discriminator = None + + self.action = action + self.resource = resource + + @property + def action(self): + """Get the action of this Permission. + + :return: The action of this Permission. + :rtype: str + """ # noqa: E501 + return self._action + + @action.setter + def action(self, action): + """Set the action of this Permission. + + :param action: The action of this Permission. + :type: str + """ # noqa: E501 + if action is None: + raise ValueError("Invalid value for `action`, must not be `None`") # noqa: E501 + self._action = action + + @property + def resource(self): + """Get the resource of this Permission. + + :return: The resource of this Permission. + :rtype: PermissionResource + """ # noqa: E501 + return self._resource + + @resource.setter + def resource(self, resource): + """Set the resource of this Permission. + + :param resource: The resource of this Permission. 
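# --- Illustrative sketch combining the two stack models above: resource_id and
# kind are mandatory when pinning an extra resource, while templateMetaName is
# optional; PatchStackRequest itself accepts None for any field.
from influxdb_client.domain.patch_stack_request import PatchStackRequest
from influxdb_client.domain.patch_stack_request_additional_resources import PatchStackRequestAdditionalResources

extra = PatchStackRequestAdditionalResources(
    resource_id="0000000000000001",  # hypothetical ID; None raises ValueError
    kind="Bucket",                   # hypothetical kind; None raises ValueError
)
patch = PatchStackRequest(name="monitoring-stack", additional_resources=[extra])
print(patch.to_dict())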
+ :type: PermissionResource + """ # noqa: E501 + if resource is None: + raise ValueError("Invalid value for `resource`, must not be `None`") # noqa: E501 + self._resource = resource + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Permission): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission_resource.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission_resource.py new file mode 100644 index 0000000..07634a0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/permission_resource.py @@ -0,0 +1,220 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PermissionResource(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'org': 'str' + } + + attribute_map = { + 'type': 'type', + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'org': 'org' + } + + def __init__(self, type=None, id=None, name=None, org_id=None, org=None): # noqa: E501,D401,D403 + """PermissionResource - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._id = None + self._name = None + self._org_id = None + self._org = None + self.discriminator = None + + self.type = type + if id is not None: + self.id = id + if name is not None: + self.name = name + if org_id is not None: + self.org_id = org_id + if org is not None: + self.org = org + + @property + def type(self): + """Get the type of this PermissionResource. + + A resource type. Identifies the API resource's type (or _kind_). + + :return: The type of this PermissionResource. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PermissionResource. + + A resource type. Identifies the API resource's type (or _kind_). + + :param type: The type of this PermissionResource. 
+ :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def id(self): + """Get the id of this PermissionResource. + + A resource ID. Identifies a specific resource. + + :return: The id of this PermissionResource. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this PermissionResource. + + A resource ID. Identifies a specific resource. + + :param id: The id of this PermissionResource. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this PermissionResource. + + The name of the resource. _Note: not all resource types have a `name` property_. + + :return: The name of this PermissionResource. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PermissionResource. + + The name of the resource. _Note: not all resource types have a `name` property_. + + :param name: The name of this PermissionResource. + :type: str + """ # noqa: E501 + self._name = name + + @property + def org_id(self): + """Get the org_id of this PermissionResource. + + An organization ID. Identifies the organization that owns the resource. + + :return: The org_id of this PermissionResource. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this PermissionResource. + + An organization ID. Identifies the organization that owns the resource. + + :param org_id: The org_id of this PermissionResource. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def org(self): + """Get the org of this PermissionResource. + + An organization name. The organization that owns the resource. + + :return: The org of this PermissionResource. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this PermissionResource. + + An organization name. The organization that owns the resource. + + :param org: The org of this PermissionResource. 
+ :type: str + """ # noqa: E501 + self._org = org + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PermissionResource): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_expression.py new file mode 100644 index 0000000..3f98411 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_expression.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class PipeExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'argument': 'Expression', + 'call': 'CallExpression' + } + + attribute_map = { + 'type': 'type', + 'argument': 'argument', + 'call': 'call' + } + + def __init__(self, type=None, argument=None, call=None): # noqa: E501,D401,D403 + """PipeExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._argument = None + self._call = None + self.discriminator = None + + if type is not None: + self.type = type + if argument is not None: + self.argument = argument + if call is not None: + self.call = call + + @property + def type(self): + """Get the type of this PipeExpression. + + Type of AST node + + :return: The type of this PipeExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PipeExpression. + + Type of AST node + + :param type: The type of this PipeExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def argument(self): + """Get the argument of this PipeExpression. + + :return: The argument of this PipeExpression. + :rtype: Expression + """ # noqa: E501 + return self._argument + + @argument.setter + def argument(self, argument): + """Set the argument of this PipeExpression. 
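# --- Illustrative sketch combining the two permission models above: Permission
# requires both an action and a PermissionResource, whose `type` is in turn the
# only mandatory resource field.
from influxdb_client.domain.permission import Permission
from influxdb_client.domain.permission_resource import PermissionResource

resource = PermissionResource(type="buckets", org_id="0000000000000002")  # hypothetical org ID
perm = Permission(action="read", resource=resource)
print(perm.to_dict())  # the nested resource is serialized via its own to_dict()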
+ + :param argument: The argument of this PipeExpression. + :type: Expression + """ # noqa: E501 + self._argument = argument + + @property + def call(self): + """Get the call of this PipeExpression. + + :return: The call of this PipeExpression. + :rtype: CallExpression + """ # noqa: E501 + return self._call + + @call.setter + def call(self, call): + """Set the call of this PipeExpression. + + :param call: The call of this PipeExpression. + :type: CallExpression + """ # noqa: E501 + self._call = call + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PipeExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_literal.py new file mode 100644 index 0000000..eea0243 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/pipe_literal.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class PipeLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str' + } + + attribute_map = { + 'type': 'type' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """PipeLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self.discriminator = None + + if type is not None: + self.type = type + + @property + def type(self): + """Get the type of this PipeLiteral. + + Type of AST node + + :return: The type of this PipeLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PipeLiteral. + + Type of AST node + + :param type: The type of this PipeLiteral. 
+ :type: str + """ # noqa: E501 + self._type = type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PipeLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_bucket_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_bucket_request.py new file mode 100644 index 0000000..83215a7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_bucket_request.py @@ -0,0 +1,244 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostBucketRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'org_id': 'str', + 'name': 'str', + 'description': 'str', + 'rp': 'str', + 'retention_rules': 'list[BucketRetentionRules]', + 'schema_type': 'SchemaType' + } + + attribute_map = { + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description', + 'rp': 'rp', + 'retention_rules': 'retentionRules', + 'schema_type': 'schemaType' + } + + def __init__(self, org_id=None, name=None, description=None, rp='0', retention_rules=None, schema_type=None): # noqa: E501,D401,D403 + """PostBucketRequest - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._name = None + self._description = None + self._rp = None + self._retention_rules = None + self._schema_type = None + self.discriminator = None + + self.org_id = org_id + self.name = name + if description is not None: + self.description = description + if rp is not None: + self.rp = rp + if retention_rules is not None: + self.retention_rules = retention_rules + if schema_type is not None: + self.schema_type = schema_type + + @property + def org_id(self): + """Get the org_id of this PostBucketRequest. + + The organization ID. Specifies the organization that owns the bucket. + + :return: The org_id of this PostBucketRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this PostBucketRequest. 
+ + The organization ID. Specifies the organization that owns the bucket. + + :param org_id: The org_id of this PostBucketRequest. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this PostBucketRequest. + + The bucket name. + + :return: The name of this PostBucketRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PostBucketRequest. + + The bucket name. + + :param name: The name of this PostBucketRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PostBucketRequest. + + A description of the bucket. + + :return: The description of this PostBucketRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PostBucketRequest. + + A description of the bucket. + + :param description: The description of this PostBucketRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def rp(self): + """Get the rp of this PostBucketRequest. + + The retention policy for the bucket. For InfluxDB 1.x, specifies the duration of time that each data point in the retention policy persists. If you need compatibility with InfluxDB 1.x, specify a value for the `rp` property; otherwise, see the `retentionRules` property. [Retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) is an InfluxDB 1.x concept. The InfluxDB 2.x and Cloud equivalent is [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). The InfluxDB `/api/v2` API uses `RetentionRules` to configure the retention period. + + :return: The rp of this PostBucketRequest. + :rtype: str + """ # noqa: E501 + return self._rp + + @rp.setter + def rp(self, rp): + """Set the rp of this PostBucketRequest. + + The retention policy for the bucket. For InfluxDB 1.x, specifies the duration of time that each data point in the retention policy persists. If you need compatibility with InfluxDB 1.x, specify a value for the `rp` property; otherwise, see the `retentionRules` property. [Retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp) is an InfluxDB 1.x concept. The InfluxDB 2.x and Cloud equivalent is [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). The InfluxDB `/api/v2` API uses `RetentionRules` to configure the retention period. + + :param rp: The rp of this PostBucketRequest. + :type: str + """ # noqa: E501 + self._rp = rp + + @property + def retention_rules(self): + """Get the retention_rules of this PostBucketRequest. + + Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required. + + :return: The retention_rules of this PostBucketRequest. 
+        :rtype: list[BucketRetentionRules]
+        """  # noqa: E501
+        return self._retention_rules
+
+    @retention_rules.setter
+    def retention_rules(self, retention_rules):
+        """Set the retention_rules of this PostBucketRequest.
+
+        Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required.
+
+        :param retention_rules: The retention_rules of this PostBucketRequest.
+        :type: list[BucketRetentionRules]
+        """  # noqa: E501
+        self._retention_rules = retention_rules
+
+    @property
+    def schema_type(self):
+        """Get the schema_type of this PostBucketRequest.
+
+        :return: The schema_type of this PostBucketRequest.
+        :rtype: SchemaType
+        """  # noqa: E501
+        return self._schema_type
+
+    @schema_type.setter
+    def schema_type(self, schema_type):
+        """Set the schema_type of this PostBucketRequest.
+
+        :param schema_type: The schema_type of this PostBucketRequest.
+        :type: SchemaType
+        """  # noqa: E501
+        self._schema_type = schema_type
+
+    def to_dict(self):
+        """Return the model properties as a dict."""
+        result = {}
+
+        for attr, _ in self.openapi_types.items():
+            value = getattr(self, attr)
+            if isinstance(value, list):
+                result[attr] = list(map(
+                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
+                    value
+                ))
+            elif hasattr(value, "to_dict"):
+                result[attr] = value.to_dict()
+            elif isinstance(value, dict):
+                result[attr] = dict(map(
+                    lambda item: (item[0], item[1].to_dict())
+                    if hasattr(item[1], "to_dict") else item,
+                    value.items()
+                ))
+            else:
+                result[attr] = value
+
+        return result
+
+    def to_str(self):
+        """Return the string representation of the model."""
+        return pprint.pformat(self.to_dict())
+
+    def __repr__(self):
+        """For `print` and `pprint`."""
+        return self.to_str()
+
+    def __eq__(self, other):
+        """Return true if both objects are equal."""
+        if not isinstance(other, PostBucketRequest):
+            return False
+
+        return self.__dict__ == other.__dict__
+
+    def __ne__(self, other):
+        """Return true if both objects are not equal."""
+        return not self == other
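The `retentionRules` notes above carry the only real operational semantics in this model: rules are required on InfluxDB Cloud, optional on OSS, and replace the 1.x retention-policy (`rp`) concept. A minimal usage sketch of the model added here, assuming the package's top-level re-exports `PostBucketRequest` and `BucketRetentionRules` and a placeholder organization ID:

from influxdb_client import BucketRetentionRules, PostBucketRequest

# Expire points after 30 days (2592000 seconds). __init__ assigns `org_id`
# and `name` unconditionally, so their setters raise ValueError when either
# is None; retention_rules stays optional here, as on InfluxDB OSS.
request = PostBucketRequest(
    org_id="0000000000000000",  # hypothetical organization ID
    name="environment",
    description="Sensor readings",
    retention_rules=[BucketRetentionRules(type="expire", every_seconds=2592000)],
)

# to_dict() (defined above) renders the model as a plain dict keyed by
# attribute name; the API client maps those names to the JSON keys in
# `attribute_map` when it serializes the request to /api/v2/buckets.
print(request.to_dict())

In recent client versions the higher-level `BucketsApi.create_bucket(bucket_name=..., retention_rules=..., org=...)` assembles an equivalent request internally.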
+ """ + openapi_types = { + 'type': 'str' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'deadman': 'DeadmanCheck', + 'custom': 'CustomCheck', + 'threshold': 'ThresholdCheck' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """PostCheck - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this PostCheck. + + :return: The type of this PostCheck. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PostCheck. + + :param type: The type of this PostCheck. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostCheck): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_endpoint.py new file mode 100644 index 0000000..d2d0b3c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_endpoint.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostNotificationEndpoint(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'slack': 'SlackNotificationEndpoint', + 'pagerduty': 'PagerDutyNotificationEndpoint', + 'http': 'HTTPNotificationEndpoint', + 'telegram': 'TelegramNotificationEndpoint' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """PostNotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this PostNotificationEndpoint. + + :return: The type of this PostNotificationEndpoint. + :rtype: NotificationEndpointType + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PostNotificationEndpoint. + + :param type: The type of this PostNotificationEndpoint. + :type: NotificationEndpointType + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostNotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_rule.py new file mode 100644 index 0000000..d0a54a0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_notification_rule.py @@ -0,0 +1,126 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostNotificationRule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str' + } + + attribute_map = { + 'type': 'type' + } + + discriminator_value_class_map = { + 'slack': 'SlackNotificationRule', + 'pagerduty': 'PagerDutyNotificationRule', + 'smtp': 'SMTPNotificationRule', + 'http': 'HTTPNotificationRule', + 'telegram': 'TelegramNotificationRule' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """PostNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this PostNotificationRule. + + The discriminator between other types of notification rules is "telegram". + + :return: The type of this PostNotificationRule. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this PostNotificationRule. + + The discriminator between other types of notification rules is "telegram". + + :param type: The type of this PostNotificationRule. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_organization_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_organization_request.py new file mode 100644 index 0000000..5e152d3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_organization_request.py @@ -0,0 +1,139 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostOrganizationRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description' + } + + def __init__(self, name=None, description=None): # noqa: E501,D401,D403 + """PostOrganizationRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self.discriminator = None + + self.name = name + if description is not None: + self.description = description + + @property + def name(self): + """Get the name of this PostOrganizationRequest. + + The name of the organization. + + :return: The name of this PostOrganizationRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PostOrganizationRequest. + + The name of the organization. + + :param name: The name of this PostOrganizationRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PostOrganizationRequest. + + The description of the organization. + + :return: The description of this PostOrganizationRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PostOrganizationRequest. + + The description of the organization. + + :param description: The description of this PostOrganizationRequest. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostOrganizationRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_restore_kv_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_restore_kv_response.py new file mode 100644 index 0000000..5cd5c16 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_restore_kv_response.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostRestoreKVResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'token': 'str' + } + + attribute_map = { + 'token': 'token' + } + + def __init__(self, token=None): # noqa: E501,D401,D403 + """PostRestoreKVResponse - a model defined in OpenAPI.""" # noqa: E501 + self._token = None + self.discriminator = None + + if token is not None: + self.token = token + + @property + def token(self): + """Get the token of this PostRestoreKVResponse. + + token is the root token for the instance after restore (this is overwritten during the restore) + + :return: The token of this PostRestoreKVResponse. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this PostRestoreKVResponse. + + token is the root token for the instance after restore (this is overwritten during the restore) + + :param token: The token of this PostRestoreKVResponse. + :type: str + """ # noqa: E501 + self._token = token + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostRestoreKVResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_stack_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_stack_request.py new file mode 100644 index 0000000..ce68303 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/post_stack_request.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class PostStackRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'org_id': 'str', + 'name': 'str', + 'description': 'str', + 'urls': 'list[str]' + } + + attribute_map = { + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description', + 'urls': 'urls' + } + + def __init__(self, org_id=None, name=None, description=None, urls=None): # noqa: E501,D401,D403 + """PostStackRequest - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._name = None + self._description = None + self._urls = None + self.discriminator = None + + if org_id is not None: + self.org_id = org_id + if name is not None: + self.name = name + if description is not None: + self.description = description + if urls is not None: + self.urls = urls + + @property + def org_id(self): + """Get the org_id of this PostStackRequest. + + :return: The org_id of this PostStackRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this PostStackRequest. + + :param org_id: The org_id of this PostStackRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this PostStackRequest. + + :return: The name of this PostStackRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this PostStackRequest. + + :param name: The name of this PostStackRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this PostStackRequest. + + :return: The description of this PostStackRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this PostStackRequest. + + :param description: The description of this PostStackRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def urls(self): + """Get the urls of this PostStackRequest. + + :return: The urls of this PostStackRequest. + :rtype: list[str] + """ # noqa: E501 + return self._urls + + @urls.setter + def urls(self, urls): + """Set the urls of this PostStackRequest. + + :param urls: The urls of this PostStackRequest. 
+ :type: list[str] + """ # noqa: E501 + self._urls = urls + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PostStackRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/property_key.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/property_key.py new file mode 100644 index 0000000..e072527 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/property_key.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class PropertyKey(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """PropertyKey - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, PropertyKey): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/query.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query.py new file mode 100644 index 0000000..9b69972 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query.py @@ -0,0 +1,239 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Query(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'extern': 'File', + 'query': 'str', + 'type': 'str', + 'params': 'dict(str, object)', + 'dialect': 'Dialect', + 'now': 'datetime' + } + + attribute_map = { + 'extern': 'extern', + 'query': 'query', + 'type': 'type', + 'params': 'params', + 'dialect': 'dialect', + 'now': 'now' + } + + def __init__(self, extern=None, query=None, type=None, params=None, dialect=None, now=None): # noqa: E501,D401,D403 + """Query - a model defined in OpenAPI.""" # noqa: E501 + self._extern = None + self._query = None + self._type = None + self._params = None + self._dialect = None + self._now = None + self.discriminator = None + + if extern is not None: + self.extern = extern + self.query = query + if type is not None: + self.type = type + if params is not None: + self.params = params + if dialect is not None: + self.dialect = dialect + if now is not None: + self.now = now + + @property + def extern(self): + """Get the extern of this Query. + + :return: The extern of this Query. + :rtype: File + """ # noqa: E501 + return self._extern + + @extern.setter + def extern(self, extern): + """Set the extern of this Query. + + :param extern: The extern of this Query. 
+ :type: File + """ # noqa: E501 + self._extern = extern + + @property + def query(self): + """Get the query of this Query. + + The query script to execute. + + :return: The query of this Query. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this Query. + + The query script to execute. + + :param query: The query of this Query. + :type: str + """ # noqa: E501 + if query is None: + raise ValueError("Invalid value for `query`, must not be `None`") # noqa: E501 + self._query = query + + @property + def type(self): + """Get the type of this Query. + + The type of query. Must be "flux". + + :return: The type of this Query. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Query. + + The type of query. Must be "flux". + + :param type: The type of this Query. + :type: str + """ # noqa: E501 + self._type = type + + @property + def params(self): + r"""Get the params of this Query. + + Key-value pairs passed as parameters during query execution. To use parameters in your query, pass a _`query`_ with `params` references (in dot notation)--for example: ```json query: "from(bucket: params.mybucket)\\ |> range(start: params.rangeStart) |> limit(n:1)" ``` and pass _`params`_ with the key-value pairs--for example: ```json params: { "mybucket": "environment", "rangeStart": "-30d" } ``` During query execution, InfluxDB passes _`params`_ to your script and substitutes the values. #### Limitations - If you use _`params`_, you can't use _`extern`_. + + :return: The params of this Query. + :rtype: dict(str, object) + """ # noqa: E501 + return self._params + + @params.setter + def params(self, params): + r"""Set the params of this Query. + + Key-value pairs passed as parameters during query execution. To use parameters in your query, pass a _`query`_ with `params` references (in dot notation)--for example: ```json query: "from(bucket: params.mybucket)\\ |> range(start: params.rangeStart) |> limit(n:1)" ``` and pass _`params`_ with the key-value pairs--for example: ```json params: { "mybucket": "environment", "rangeStart": "-30d" } ``` During query execution, InfluxDB passes _`params`_ to your script and substitutes the values. #### Limitations - If you use _`params`_, you can't use _`extern`_. + + :param params: The params of this Query. + :type: dict(str, object) + """ # noqa: E501 + self._params = params + + @property + def dialect(self): + """Get the dialect of this Query. + + :return: The dialect of this Query. + :rtype: Dialect + """ # noqa: E501 + return self._dialect + + @dialect.setter + def dialect(self, dialect): + """Set the dialect of this Query. + + :param dialect: The dialect of this Query. + :type: Dialect + """ # noqa: E501 + self._dialect = dialect + + @property + def now(self): + """Get the now of this Query. + + Specifies the time that should be reported as `now` in the query. Default is the server `now` time. + + :return: The now of this Query. + :rtype: datetime + """ # noqa: E501 + return self._now + + @now.setter + def now(self, now): + """Set the now of this Query. + + Specifies the time that should be reported as `now` in the query. Default is the server `now` time. + + :param now: The now of this Query. 
+        :type: datetime
+        """  # noqa: E501
+        self._now = now
+
+    def to_dict(self):
+        """Return the model properties as a dict."""
+        result = {}
+
+        for attr, _ in self.openapi_types.items():
+            value = getattr(self, attr)
+            if isinstance(value, list):
+                result[attr] = list(map(
+                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
+                    value
+                ))
+            elif hasattr(value, "to_dict"):
+                result[attr] = value.to_dict()
+            elif isinstance(value, dict):
+                result[attr] = dict(map(
+                    lambda item: (item[0], item[1].to_dict())
+                    if hasattr(item[1], "to_dict") else item,
+                    value.items()
+                ))
+            else:
+                result[attr] = value
+
+        return result
+
+    def to_str(self):
+        """Return the string representation of the model."""
+        return pprint.pformat(self.to_dict())
+
+    def __repr__(self):
+        """For `print` and `pprint`."""
+        return self.to_str()
+
+    def __eq__(self, other):
+        """Return true if both objects are equal."""
+        if not isinstance(other, Query):
+            return False
+
+        return self.__dict__ == other.__dict__
+
+    def __ne__(self, other):
+        """Return true if both objects are not equal."""
+        return not self == other
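The `params` documentation above is the densest part of this model: parameters are referenced in dot notation inside the Flux script, InfluxDB substitutes the values server-side during execution, and `params` cannot be combined with `extern`. A minimal sketch of assembling the corresponding `/api/v2/query` request body, reusing the docstring's own example values:

from influxdb_client.domain.query import Query

# The Flux script references parameters in dot notation; `params` supplies
# the key-value pairs InfluxDB substitutes at execution time. Because
# `params` is set, `extern` must stay unset (the two are mutually exclusive).
q = Query(
    query="from(bucket: params.mybucket) |> range(start: params.rangeStart) |> limit(n: 1)",
    type="flux",  # the only accepted value, per the docstring above
    params={"mybucket": "environment", "rangeStart": "-30d"},
)

# `query` is the sole required field; its setter raises ValueError on None.
print(q.to_dict())

The same pairs can also be handed to the client's `QueryApi.query(query, params=...)`, which wraps them for you. Whether a bare string such as "-30d" survives substitution as a range start depends on the server and client version, so the values here are copied from the docstring example rather than guaranteed portable.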
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """QueryEditMode - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, QueryEditMode): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties.py new file mode 100644 index 0000000..143bb01 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.variable_properties import VariableProperties + + +class QueryVariableProperties(VariableProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'values': 'QueryVariablePropertiesValues' + } + + attribute_map = { + 'type': 'type', + 'values': 'values' + } + + def __init__(self, type=None, values=None): # noqa: E501,D401,D403 + """QueryVariableProperties - a model defined in OpenAPI.""" # noqa: E501 + VariableProperties.__init__(self) # noqa: E501 + + self._type = None + self._values = None + self.discriminator = None + + if type is not None: + self.type = type + if values is not None: + self.values = values + + @property + def type(self): + """Get the type of this QueryVariableProperties. + + :return: The type of this QueryVariableProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this QueryVariableProperties. + + :param type: The type of this QueryVariableProperties. + :type: str + """ # noqa: E501 + self._type = type + + @property + def values(self): + """Get the values of this QueryVariableProperties. + + :return: The values of this QueryVariableProperties. 
+ :rtype: QueryVariablePropertiesValues + """ # noqa: E501 + return self._values + + @values.setter + def values(self, values): + """Set the values of this QueryVariableProperties. + + :param values: The values of this QueryVariableProperties. + :type: QueryVariablePropertiesValues + """ # noqa: E501 + self._values = values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, QueryVariableProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties_values.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties_values.py new file mode 100644 index 0000000..a06a174 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/query_variable_properties_values.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class QueryVariablePropertiesValues(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'query': 'str', + 'language': 'str' + } + + attribute_map = { + 'query': 'query', + 'language': 'language' + } + + def __init__(self, query=None, language=None): # noqa: E501,D401,D403 + """QueryVariablePropertiesValues - a model defined in OpenAPI.""" # noqa: E501 + self._query = None + self._language = None + self.discriminator = None + + if query is not None: + self.query = query + if language is not None: + self.language = language + + @property + def query(self): + """Get the query of this QueryVariablePropertiesValues. + + :return: The query of this QueryVariablePropertiesValues. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this QueryVariablePropertiesValues. + + :param query: The query of this QueryVariablePropertiesValues. + :type: str + """ # noqa: E501 + self._query = query + + @property + def language(self): + """Get the language of this QueryVariablePropertiesValues. 
+ + :return: The language of this QueryVariablePropertiesValues. + :rtype: str + """ # noqa: E501 + return self._language + + @language.setter + def language(self, language): + """Set the language of this QueryVariablePropertiesValues. + + :param language: The language of this QueryVariablePropertiesValues. + :type: str + """ # noqa: E501 + self._language = language + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, QueryVariablePropertiesValues): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/range_threshold.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/range_threshold.py new file mode 100644 index 0000000..68e1986 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/range_threshold.py @@ -0,0 +1,188 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.threshold_base import ThresholdBase + + +class RangeThreshold(ThresholdBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'min': 'float', + 'max': 'float', + 'within': 'bool', + 'level': 'CheckStatusLevel', + 'all_values': 'bool' + } + + attribute_map = { + 'type': 'type', + 'min': 'min', + 'max': 'max', + 'within': 'within', + 'level': 'level', + 'all_values': 'allValues' + } + + def __init__(self, type="range", min=None, max=None, within=None, level=None, all_values=None): # noqa: E501,D401,D403 + """RangeThreshold - a model defined in OpenAPI.""" # noqa: E501 + ThresholdBase.__init__(self, level=level, all_values=all_values) # noqa: E501 + + self._type = None + self._min = None + self._max = None + self._within = None + self.discriminator = None + + self.type = type + self.min = min + self.max = max + self.within = within + + @property + def type(self): + """Get the type of this RangeThreshold. + + :return: The type of this RangeThreshold. 
+ :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this RangeThreshold. + + :param type: The type of this RangeThreshold. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def min(self): + """Get the min of this RangeThreshold. + + :return: The min of this RangeThreshold. + :rtype: float + """ # noqa: E501 + return self._min + + @min.setter + def min(self, min): + """Set the min of this RangeThreshold. + + :param min: The min of this RangeThreshold. + :type: float + """ # noqa: E501 + if min is None: + raise ValueError("Invalid value for `min`, must not be `None`") # noqa: E501 + self._min = min + + @property + def max(self): + """Get the max of this RangeThreshold. + + :return: The max of this RangeThreshold. + :rtype: float + """ # noqa: E501 + return self._max + + @max.setter + def max(self, max): + """Set the max of this RangeThreshold. + + :param max: The max of this RangeThreshold. + :type: float + """ # noqa: E501 + if max is None: + raise ValueError("Invalid value for `max`, must not be `None`") # noqa: E501 + self._max = max + + @property + def within(self): + """Get the within of this RangeThreshold. + + :return: The within of this RangeThreshold. + :rtype: bool + """ # noqa: E501 + return self._within + + @within.setter + def within(self, within): + """Set the within of this RangeThreshold. + + :param within: The within of this RangeThreshold. + :type: bool + """ # noqa: E501 + if within is None: + raise ValueError("Invalid value for `within`, must not be `None`") # noqa: E501 + self._within = within + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RangeThreshold): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/ready.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/ready.py new file mode 100644 index 0000000..46c4d29 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/ready.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Ready(object): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'status': 'str', + 'started': 'datetime', + 'up': 'str' + } + + attribute_map = { + 'status': 'status', + 'started': 'started', + 'up': 'up' + } + + def __init__(self, status=None, started=None, up=None): # noqa: E501,D401,D403 + """Ready - a model defined in OpenAPI.""" # noqa: E501 + self._status = None + self._started = None + self._up = None + self.discriminator = None + + if status is not None: + self.status = status + if started is not None: + self.started = started + if up is not None: + self.up = up + + @property + def status(self): + """Get the status of this Ready. + + :return: The status of this Ready. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this Ready. + + :param status: The status of this Ready. + :type: str + """ # noqa: E501 + self._status = status + + @property + def started(self): + """Get the started of this Ready. + + :return: The started of this Ready. + :rtype: datetime + """ # noqa: E501 + return self._started + + @started.setter + def started(self, started): + """Set the started of this Ready. + + :param started: The started of this Ready. + :type: datetime + """ # noqa: E501 + self._started = started + + @property + def up(self): + """Get the up of this Ready. + + :return: The up of this Ready. + :rtype: str + """ # noqa: E501 + return self._up + + @up.setter + def up(self, up): + """Set the up of this Ready. + + :param up: The up of this Ready. + :type: str + """ # noqa: E501 + self._up = up + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Ready): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/regexp_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/regexp_literal.py new file mode 100644 index 0000000..95fe4cb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/regexp_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class RegexpLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'str' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """RegexpLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this RegexpLiteral. + + Type of AST node + + :return: The type of this RegexpLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this RegexpLiteral. + + Type of AST node + + :param type: The type of this RegexpLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this RegexpLiteral. + + :return: The value of this RegexpLiteral. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this RegexpLiteral. + + :param value: The value of this RegexpLiteral. + :type: str + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RegexpLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection.py new file mode 100644 index 0000000..e0c1785 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection.py @@ -0,0 +1,250 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RemoteConnection(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'description': 'str', + 'remote_url': 'str', + 'remote_org_id': 'str', + 'allow_insecure_tls': 'bool' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'description': 'description', + 'remote_url': 'remoteURL', + 'remote_org_id': 'remoteOrgID', + 'allow_insecure_tls': 'allowInsecureTLS' + } + + def __init__(self, id=None, name=None, org_id=None, description=None, remote_url=None, remote_org_id=None, allow_insecure_tls=False): # noqa: E501,D401,D403 + """RemoteConnection - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._org_id = None + self._description = None + self._remote_url = None + self._remote_org_id = None + self._allow_insecure_tls = None + self.discriminator = None + + self.id = id + self.name = name + self.org_id = org_id + if description is not None: + self.description = description + self.remote_url = remote_url + if remote_org_id is not None: + self.remote_org_id = remote_org_id + self.allow_insecure_tls = allow_insecure_tls + + @property + def id(self): + """Get the id of this RemoteConnection. + + :return: The id of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this RemoteConnection. + + :param id: The id of this RemoteConnection. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this RemoteConnection. + + :return: The name of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this RemoteConnection. + + :param name: The name of this RemoteConnection. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def org_id(self): + """Get the org_id of this RemoteConnection. + + :return: The org_id of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this RemoteConnection. + + :param org_id: The org_id of this RemoteConnection. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def description(self): + """Get the description of this RemoteConnection. + + :return: The description of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this RemoteConnection. + + :param description: The description of this RemoteConnection. 
+ :type: str + """ # noqa: E501 + self._description = description + + @property + def remote_url(self): + """Get the remote_url of this RemoteConnection. + + :return: The remote_url of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._remote_url + + @remote_url.setter + def remote_url(self, remote_url): + """Set the remote_url of this RemoteConnection. + + :param remote_url: The remote_url of this RemoteConnection. + :type: str + """ # noqa: E501 + if remote_url is None: + raise ValueError("Invalid value for `remote_url`, must not be `None`") # noqa: E501 + self._remote_url = remote_url + + @property + def remote_org_id(self): + """Get the remote_org_id of this RemoteConnection. + + :return: The remote_org_id of this RemoteConnection. + :rtype: str + """ # noqa: E501 + return self._remote_org_id + + @remote_org_id.setter + def remote_org_id(self, remote_org_id): + """Set the remote_org_id of this RemoteConnection. + + :param remote_org_id: The remote_org_id of this RemoteConnection. + :type: str + """ # noqa: E501 + self._remote_org_id = remote_org_id + + @property + def allow_insecure_tls(self): + """Get the allow_insecure_tls of this RemoteConnection. + + :return: The allow_insecure_tls of this RemoteConnection. + :rtype: bool + """ # noqa: E501 + return self._allow_insecure_tls + + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls): + """Set the allow_insecure_tls of this RemoteConnection. + + :param allow_insecure_tls: The allow_insecure_tls of this RemoteConnection. + :type: bool + """ # noqa: E501 + if allow_insecure_tls is None: + raise ValueError("Invalid value for `allow_insecure_tls`, must not be `None`") # noqa: E501 + self._allow_insecure_tls = allow_insecure_tls + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RemoteConnection): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_creation_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_creation_request.py new file mode 100644 index 0000000..ba77536 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_creation_request.py @@ -0,0 +1,250 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RemoteConnectionCreationRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'org_id': 'str', + 'remote_url': 'str', + 'remote_api_token': 'str', + 'remote_org_id': 'str', + 'allow_insecure_tls': 'bool' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'org_id': 'orgID', + 'remote_url': 'remoteURL', + 'remote_api_token': 'remoteAPIToken', + 'remote_org_id': 'remoteOrgID', + 'allow_insecure_tls': 'allowInsecureTLS' + } + + def __init__(self, name=None, description=None, org_id=None, remote_url=None, remote_api_token=None, remote_org_id=None, allow_insecure_tls=False): # noqa: E501,D401,D403 + """RemoteConnectionCreationRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._org_id = None + self._remote_url = None + self._remote_api_token = None + self._remote_org_id = None + self._allow_insecure_tls = None + self.discriminator = None + + self.name = name + if description is not None: + self.description = description + self.org_id = org_id + self.remote_url = remote_url + self.remote_api_token = remote_api_token + if remote_org_id is not None: + self.remote_org_id = remote_org_id + self.allow_insecure_tls = allow_insecure_tls + + @property + def name(self): + """Get the name of this RemoteConnectionCreationRequest. + + :return: The name of this RemoteConnectionCreationRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this RemoteConnectionCreationRequest. + + :param name: The name of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this RemoteConnectionCreationRequest. + + :return: The description of this RemoteConnectionCreationRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this RemoteConnectionCreationRequest. + + :param description: The description of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def org_id(self): + """Get the org_id of this RemoteConnectionCreationRequest. + + :return: The org_id of this RemoteConnectionCreationRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this RemoteConnectionCreationRequest. + + :param org_id: The org_id of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def remote_url(self): + """Get the remote_url of this RemoteConnectionCreationRequest. + + :return: The remote_url of this RemoteConnectionCreationRequest. 
+ :rtype: str + """ # noqa: E501 + return self._remote_url + + @remote_url.setter + def remote_url(self, remote_url): + """Set the remote_url of this RemoteConnectionCreationRequest. + + :param remote_url: The remote_url of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + if remote_url is None: + raise ValueError("Invalid value for `remote_url`, must not be `None`") # noqa: E501 + self._remote_url = remote_url + + @property + def remote_api_token(self): + """Get the remote_api_token of this RemoteConnectionCreationRequest. + + :return: The remote_api_token of this RemoteConnectionCreationRequest. + :rtype: str + """ # noqa: E501 + return self._remote_api_token + + @remote_api_token.setter + def remote_api_token(self, remote_api_token): + """Set the remote_api_token of this RemoteConnectionCreationRequest. + + :param remote_api_token: The remote_api_token of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + if remote_api_token is None: + raise ValueError("Invalid value for `remote_api_token`, must not be `None`") # noqa: E501 + self._remote_api_token = remote_api_token + + @property + def remote_org_id(self): + """Get the remote_org_id of this RemoteConnectionCreationRequest. + + :return: The remote_org_id of this RemoteConnectionCreationRequest. + :rtype: str + """ # noqa: E501 + return self._remote_org_id + + @remote_org_id.setter + def remote_org_id(self, remote_org_id): + """Set the remote_org_id of this RemoteConnectionCreationRequest. + + :param remote_org_id: The remote_org_id of this RemoteConnectionCreationRequest. + :type: str + """ # noqa: E501 + self._remote_org_id = remote_org_id + + @property + def allow_insecure_tls(self): + """Get the allow_insecure_tls of this RemoteConnectionCreationRequest. + + :return: The allow_insecure_tls of this RemoteConnectionCreationRequest. + :rtype: bool + """ # noqa: E501 + return self._allow_insecure_tls + + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls): + """Set the allow_insecure_tls of this RemoteConnectionCreationRequest. + + :param allow_insecure_tls: The allow_insecure_tls of this RemoteConnectionCreationRequest. 
+ :type: bool + """ # noqa: E501 + if allow_insecure_tls is None: + raise ValueError("Invalid value for `allow_insecure_tls`, must not be `None`") # noqa: E501 + self._allow_insecure_tls = allow_insecure_tls + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RemoteConnectionCreationRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_update_request.py new file mode 100644 index 0000000..54fd9f1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connection_update_request.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RemoteConnectionUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'remote_url': 'str', + 'remote_api_token': 'str', + 'remote_org_id': 'str', + 'allow_insecure_tls': 'bool' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'remote_url': 'remoteURL', + 'remote_api_token': 'remoteAPIToken', + 'remote_org_id': 'remoteOrgID', + 'allow_insecure_tls': 'allowInsecureTLS' + } + + def __init__(self, name=None, description=None, remote_url=None, remote_api_token=None, remote_org_id=None, allow_insecure_tls=False): # noqa: E501,D401,D403 + """RemoteConnectionUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._remote_url = None + self._remote_api_token = None + self._remote_org_id = None + self._allow_insecure_tls = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if remote_url is not None: + self.remote_url = remote_url + if remote_api_token is not None: + self.remote_api_token = remote_api_token + if remote_org_id is not None: + self.remote_org_id = remote_org_id + if allow_insecure_tls is not None: + self.allow_insecure_tls = allow_insecure_tls + + @property + def name(self): + """Get the name of this RemoteConnectionUpdateRequest. + + :return: The name of this RemoteConnectionUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this RemoteConnectionUpdateRequest. + + :param name: The name of this RemoteConnectionUpdateRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this RemoteConnectionUpdateRequest. + + :return: The description of this RemoteConnectionUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this RemoteConnectionUpdateRequest. + + :param description: The description of this RemoteConnectionUpdateRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def remote_url(self): + """Get the remote_url of this RemoteConnectionUpdateRequest. + + :return: The remote_url of this RemoteConnectionUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._remote_url + + @remote_url.setter + def remote_url(self, remote_url): + """Set the remote_url of this RemoteConnectionUpdateRequest. + + :param remote_url: The remote_url of this RemoteConnectionUpdateRequest. + :type: str + """ # noqa: E501 + self._remote_url = remote_url + + @property + def remote_api_token(self): + """Get the remote_api_token of this RemoteConnectionUpdateRequest. + + :return: The remote_api_token of this RemoteConnectionUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._remote_api_token + + @remote_api_token.setter + def remote_api_token(self, remote_api_token): + """Set the remote_api_token of this RemoteConnectionUpdateRequest. + + :param remote_api_token: The remote_api_token of this RemoteConnectionUpdateRequest. + :type: str + """ # noqa: E501 + self._remote_api_token = remote_api_token + + @property + def remote_org_id(self): + """Get the remote_org_id of this RemoteConnectionUpdateRequest. + + :return: The remote_org_id of this RemoteConnectionUpdateRequest. 
+ :rtype: str + """ # noqa: E501 + return self._remote_org_id + + @remote_org_id.setter + def remote_org_id(self, remote_org_id): + """Set the remote_org_id of this RemoteConnectionUpdateRequest. + + :param remote_org_id: The remote_org_id of this RemoteConnectionUpdateRequest. + :type: str + """ # noqa: E501 + self._remote_org_id = remote_org_id + + @property + def allow_insecure_tls(self): + """Get the allow_insecure_tls of this RemoteConnectionUpdateRequest. + + :return: The allow_insecure_tls of this RemoteConnectionUpdateRequest. + :rtype: bool + """ # noqa: E501 + return self._allow_insecure_tls + + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls): + """Set the allow_insecure_tls of this RemoteConnectionUpdateRequest. + + :param allow_insecure_tls: The allow_insecure_tls of this RemoteConnectionUpdateRequest. + :type: bool + """ # noqa: E501 + self._allow_insecure_tls = allow_insecure_tls + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RemoteConnectionUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connections.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connections.py new file mode 100644 index 0000000..ca6d30c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/remote_connections.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RemoteConnections(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'remotes': 'list[RemoteConnection]' + } + + attribute_map = { + 'remotes': 'remotes' + } + + def __init__(self, remotes=None): # noqa: E501,D401,D403 + """RemoteConnections - a model defined in OpenAPI.""" # noqa: E501 + self._remotes = None + self.discriminator = None + + if remotes is not None: + self.remotes = remotes + + @property + def remotes(self): + """Get the remotes of this RemoteConnections. + + :return: The remotes of this RemoteConnections. 
+ :rtype: list[RemoteConnection] + """ # noqa: E501 + return self._remotes + + @remotes.setter + def remotes(self, remotes): + """Set the remotes of this RemoteConnections. + + :param remotes: The remotes of this RemoteConnections. + :type: list[RemoteConnection] + """ # noqa: E501 + self._remotes = remotes + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RemoteConnections): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/renamable_field.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/renamable_field.py new file mode 100644 index 0000000..088cea3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/renamable_field.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RenamableField(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'internal_name': 'str', + 'display_name': 'str', + 'visible': 'bool' + } + + attribute_map = { + 'internal_name': 'internalName', + 'display_name': 'displayName', + 'visible': 'visible' + } + + def __init__(self, internal_name=None, display_name=None, visible=None): # noqa: E501,D401,D403 + """RenamableField - a model defined in OpenAPI.""" # noqa: E501 + self._internal_name = None + self._display_name = None + self._visible = None + self.discriminator = None + + if internal_name is not None: + self.internal_name = internal_name + if display_name is not None: + self.display_name = display_name + if visible is not None: + self.visible = visible + + @property + def internal_name(self): + """Get the internal_name of this RenamableField. + + The calculated name of a field. + + :return: The internal_name of this RenamableField. + :rtype: str + """ # noqa: E501 + return self._internal_name + + @internal_name.setter + def internal_name(self, internal_name): + """Set the internal_name of this RenamableField. + + The calculated name of a field. 
+ + :param internal_name: The internal_name of this RenamableField. + :type: str + """ # noqa: E501 + self._internal_name = internal_name + + @property + def display_name(self): + """Get the display_name of this RenamableField. + + The name that a field is renamed to by the user. + + :return: The display_name of this RenamableField. + :rtype: str + """ # noqa: E501 + return self._display_name + + @display_name.setter + def display_name(self, display_name): + """Set the display_name of this RenamableField. + + The name that a field is renamed to by the user. + + :param display_name: The display_name of this RenamableField. + :type: str + """ # noqa: E501 + self._display_name = display_name + + @property + def visible(self): + """Get the visible of this RenamableField. + + Indicates whether this field should be visible on the table. + + :return: The visible of this RenamableField. + :rtype: bool + """ # noqa: E501 + return self._visible + + @visible.setter + def visible(self, visible): + """Set the visible of this RenamableField. + + Indicates whether this field should be visible on the table. + + :param visible: The visible of this RenamableField. + :type: bool + """ # noqa: E501 + self._visible = visible + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RenamableField): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication.py new file mode 100644 index 0000000..fea45ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication.py @@ -0,0 +1,412 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Replication(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'description': 'str', + 'org_id': 'str', + 'remote_id': 'str', + 'local_bucket_id': 'str', + 'remote_bucket_id': 'str', + 'remote_bucket_name': 'str', + 'max_queue_size_bytes': 'int', + 'current_queue_size_bytes': 'int', + 'remaining_bytes_to_be_synced': 'int', + 'latest_response_code': 'int', + 'latest_error_message': 'str', + 'drop_non_retryable_data': 'bool' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'description': 'description', + 'org_id': 'orgID', + 'remote_id': 'remoteID', + 'local_bucket_id': 'localBucketID', + 'remote_bucket_id': 'remoteBucketID', + 'remote_bucket_name': 'remoteBucketName', + 'max_queue_size_bytes': 'maxQueueSizeBytes', + 'current_queue_size_bytes': 'currentQueueSizeBytes', + 'remaining_bytes_to_be_synced': 'remainingBytesToBeSynced', + 'latest_response_code': 'latestResponseCode', + 'latest_error_message': 'latestErrorMessage', + 'drop_non_retryable_data': 'dropNonRetryableData' + } + + def __init__(self, id=None, name=None, description=None, org_id=None, remote_id=None, local_bucket_id=None, remote_bucket_id=None, remote_bucket_name=None, max_queue_size_bytes=None, current_queue_size_bytes=None, remaining_bytes_to_be_synced=None, latest_response_code=None, latest_error_message=None, drop_non_retryable_data=None): # noqa: E501,D401,D403 + """Replication - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._description = None + self._org_id = None + self._remote_id = None + self._local_bucket_id = None + self._remote_bucket_id = None + self._remote_bucket_name = None + self._max_queue_size_bytes = None + self._current_queue_size_bytes = None + self._remaining_bytes_to_be_synced = None + self._latest_response_code = None + self._latest_error_message = None + self._drop_non_retryable_data = None + self.discriminator = None + + self.id = id + self.name = name + if description is not None: + self.description = description + self.org_id = org_id + self.remote_id = remote_id + self.local_bucket_id = local_bucket_id + if remote_bucket_id is not None: + self.remote_bucket_id = remote_bucket_id + if remote_bucket_name is not None: + self.remote_bucket_name = remote_bucket_name + self.max_queue_size_bytes = max_queue_size_bytes + if current_queue_size_bytes is not None: + self.current_queue_size_bytes = current_queue_size_bytes + if remaining_bytes_to_be_synced is not None: + self.remaining_bytes_to_be_synced = remaining_bytes_to_be_synced + if latest_response_code is not None: + self.latest_response_code = latest_response_code + if latest_error_message is not None: + self.latest_error_message = latest_error_message + if drop_non_retryable_data is not None: + self.drop_non_retryable_data = drop_non_retryable_data + + @property + def id(self): + """Get the id of this Replication. + + :return: The id of this Replication. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Replication. + + :param id: The id of this Replication. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this Replication. + + :return: The name of this Replication. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Replication. + + :param name: The name of this Replication. 
+ :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this Replication. + + :return: The description of this Replication. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Replication. + + :param description: The description of this Replication. + :type: str + """ # noqa: E501 + self._description = description + + @property + def org_id(self): + """Get the org_id of this Replication. + + :return: The org_id of this Replication. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Replication. + + :param org_id: The org_id of this Replication. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def remote_id(self): + """Get the remote_id of this Replication. + + :return: The remote_id of this Replication. + :rtype: str + """ # noqa: E501 + return self._remote_id + + @remote_id.setter + def remote_id(self, remote_id): + """Set the remote_id of this Replication. + + :param remote_id: The remote_id of this Replication. + :type: str + """ # noqa: E501 + if remote_id is None: + raise ValueError("Invalid value for `remote_id`, must not be `None`") # noqa: E501 + self._remote_id = remote_id + + @property + def local_bucket_id(self): + """Get the local_bucket_id of this Replication. + + :return: The local_bucket_id of this Replication. + :rtype: str + """ # noqa: E501 + return self._local_bucket_id + + @local_bucket_id.setter + def local_bucket_id(self, local_bucket_id): + """Set the local_bucket_id of this Replication. + + :param local_bucket_id: The local_bucket_id of this Replication. + :type: str + """ # noqa: E501 + if local_bucket_id is None: + raise ValueError("Invalid value for `local_bucket_id`, must not be `None`") # noqa: E501 + self._local_bucket_id = local_bucket_id + + @property + def remote_bucket_id(self): + """Get the remote_bucket_id of this Replication. + + :return: The remote_bucket_id of this Replication. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_id + + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id): + """Set the remote_bucket_id of this Replication. + + :param remote_bucket_id: The remote_bucket_id of this Replication. + :type: str + """ # noqa: E501 + self._remote_bucket_id = remote_bucket_id + + @property + def remote_bucket_name(self): + """Get the remote_bucket_name of this Replication. + + :return: The remote_bucket_name of this Replication. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_name + + @remote_bucket_name.setter + def remote_bucket_name(self, remote_bucket_name): + """Set the remote_bucket_name of this Replication. + + :param remote_bucket_name: The remote_bucket_name of this Replication. + :type: str + """ # noqa: E501 + self._remote_bucket_name = remote_bucket_name + + @property + def max_queue_size_bytes(self): + """Get the max_queue_size_bytes of this Replication. + + :return: The max_queue_size_bytes of this Replication. + :rtype: int + """ # noqa: E501 + return self._max_queue_size_bytes + + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes): + """Set the max_queue_size_bytes of this Replication. 
+ + :param max_queue_size_bytes: The max_queue_size_bytes of this Replication. + :type: int + """ # noqa: E501 + if max_queue_size_bytes is None: + raise ValueError("Invalid value for `max_queue_size_bytes`, must not be `None`") # noqa: E501 + self._max_queue_size_bytes = max_queue_size_bytes + + @property + def current_queue_size_bytes(self): + """Get the current_queue_size_bytes of this Replication. + + :return: The current_queue_size_bytes of this Replication. + :rtype: int + """ # noqa: E501 + return self._current_queue_size_bytes + + @current_queue_size_bytes.setter + def current_queue_size_bytes(self, current_queue_size_bytes): + """Set the current_queue_size_bytes of this Replication. + + :param current_queue_size_bytes: The current_queue_size_bytes of this Replication. + :type: int + """ # noqa: E501 + self._current_queue_size_bytes = current_queue_size_bytes + + @property + def remaining_bytes_to_be_synced(self): + """Get the remaining_bytes_to_be_synced of this Replication. + + :return: The remaining_bytes_to_be_synced of this Replication. + :rtype: int + """ # noqa: E501 + return self._remaining_bytes_to_be_synced + + @remaining_bytes_to_be_synced.setter + def remaining_bytes_to_be_synced(self, remaining_bytes_to_be_synced): + """Set the remaining_bytes_to_be_synced of this Replication. + + :param remaining_bytes_to_be_synced: The remaining_bytes_to_be_synced of this Replication. + :type: int + """ # noqa: E501 + self._remaining_bytes_to_be_synced = remaining_bytes_to_be_synced + + @property + def latest_response_code(self): + """Get the latest_response_code of this Replication. + + :return: The latest_response_code of this Replication. + :rtype: int + """ # noqa: E501 + return self._latest_response_code + + @latest_response_code.setter + def latest_response_code(self, latest_response_code): + """Set the latest_response_code of this Replication. + + :param latest_response_code: The latest_response_code of this Replication. + :type: int + """ # noqa: E501 + self._latest_response_code = latest_response_code + + @property + def latest_error_message(self): + """Get the latest_error_message of this Replication. + + :return: The latest_error_message of this Replication. + :rtype: str + """ # noqa: E501 + return self._latest_error_message + + @latest_error_message.setter + def latest_error_message(self, latest_error_message): + """Set the latest_error_message of this Replication. + + :param latest_error_message: The latest_error_message of this Replication. + :type: str + """ # noqa: E501 + self._latest_error_message = latest_error_message + + @property + def drop_non_retryable_data(self): + """Get the drop_non_retryable_data of this Replication. + + :return: The drop_non_retryable_data of this Replication. + :rtype: bool + """ # noqa: E501 + return self._drop_non_retryable_data + + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data): + """Set the drop_non_retryable_data of this Replication. + + :param drop_non_retryable_data: The drop_non_retryable_data of this Replication. 
+ :type: bool + """ # noqa: E501 + self._drop_non_retryable_data = drop_non_retryable_data + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Replication): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_creation_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_creation_request.py new file mode 100644 index 0000000..946f2c9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_creation_request.py @@ -0,0 +1,324 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ReplicationCreationRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'org_id': 'str', + 'remote_id': 'str', + 'local_bucket_id': 'str', + 'remote_bucket_id': 'str', + 'remote_bucket_name': 'str', + 'max_queue_size_bytes': 'int', + 'drop_non_retryable_data': 'bool', + 'max_age_seconds': 'int' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'org_id': 'orgID', + 'remote_id': 'remoteID', + 'local_bucket_id': 'localBucketID', + 'remote_bucket_id': 'remoteBucketID', + 'remote_bucket_name': 'remoteBucketName', + 'max_queue_size_bytes': 'maxQueueSizeBytes', + 'drop_non_retryable_data': 'dropNonRetryableData', + 'max_age_seconds': 'maxAgeSeconds' + } + + def __init__(self, name=None, description=None, org_id=None, remote_id=None, local_bucket_id=None, remote_bucket_id=None, remote_bucket_name=None, max_queue_size_bytes=67108860, drop_non_retryable_data=False, max_age_seconds=604800): # noqa: E501,D401,D403 + """ReplicationCreationRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._org_id = None + self._remote_id = None + self._local_bucket_id = None + self._remote_bucket_id = None + self._remote_bucket_name = None + self._max_queue_size_bytes = None + self._drop_non_retryable_data = None + self._max_age_seconds = None + self.discriminator = None + + self.name = name + if description is not None: + self.description = description + self.org_id = org_id + self.remote_id = remote_id + self.local_bucket_id = local_bucket_id + if remote_bucket_id is not None: + self.remote_bucket_id = remote_bucket_id + if remote_bucket_name is not None: + self.remote_bucket_name = remote_bucket_name + self.max_queue_size_bytes = max_queue_size_bytes + if drop_non_retryable_data is not None: + self.drop_non_retryable_data = drop_non_retryable_data + self.max_age_seconds = max_age_seconds + + @property + def name(self): + """Get the name of this ReplicationCreationRequest. + + :return: The name of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this ReplicationCreationRequest. + + :param name: The name of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this ReplicationCreationRequest. + + :return: The description of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this ReplicationCreationRequest. + + :param description: The description of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def org_id(self): + """Get the org_id of this ReplicationCreationRequest. + + :return: The org_id of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this ReplicationCreationRequest. + + :param org_id: The org_id of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def remote_id(self): + """Get the remote_id of this ReplicationCreationRequest. 
+ + :return: The remote_id of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._remote_id + + @remote_id.setter + def remote_id(self, remote_id): + """Set the remote_id of this ReplicationCreationRequest. + + :param remote_id: The remote_id of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + if remote_id is None: + raise ValueError("Invalid value for `remote_id`, must not be `None`") # noqa: E501 + self._remote_id = remote_id + + @property + def local_bucket_id(self): + """Get the local_bucket_id of this ReplicationCreationRequest. + + :return: The local_bucket_id of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._local_bucket_id + + @local_bucket_id.setter + def local_bucket_id(self, local_bucket_id): + """Set the local_bucket_id of this ReplicationCreationRequest. + + :param local_bucket_id: The local_bucket_id of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + if local_bucket_id is None: + raise ValueError("Invalid value for `local_bucket_id`, must not be `None`") # noqa: E501 + self._local_bucket_id = local_bucket_id + + @property + def remote_bucket_id(self): + """Get the remote_bucket_id of this ReplicationCreationRequest. + + :return: The remote_bucket_id of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_id + + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id): + """Set the remote_bucket_id of this ReplicationCreationRequest. + + :param remote_bucket_id: The remote_bucket_id of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + self._remote_bucket_id = remote_bucket_id + + @property + def remote_bucket_name(self): + """Get the remote_bucket_name of this ReplicationCreationRequest. + + :return: The remote_bucket_name of this ReplicationCreationRequest. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_name + + @remote_bucket_name.setter + def remote_bucket_name(self, remote_bucket_name): + """Set the remote_bucket_name of this ReplicationCreationRequest. + + :param remote_bucket_name: The remote_bucket_name of this ReplicationCreationRequest. + :type: str + """ # noqa: E501 + self._remote_bucket_name = remote_bucket_name + + @property + def max_queue_size_bytes(self): + """Get the max_queue_size_bytes of this ReplicationCreationRequest. + + :return: The max_queue_size_bytes of this ReplicationCreationRequest. + :rtype: int + """ # noqa: E501 + return self._max_queue_size_bytes + + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes): + """Set the max_queue_size_bytes of this ReplicationCreationRequest. + + :param max_queue_size_bytes: The max_queue_size_bytes of this ReplicationCreationRequest. + :type: int + """ # noqa: E501 + if max_queue_size_bytes is None: + raise ValueError("Invalid value for `max_queue_size_bytes`, must not be `None`") # noqa: E501 + if max_queue_size_bytes is not None and max_queue_size_bytes < 33554430: # noqa: E501 + raise ValueError("Invalid value for `max_queue_size_bytes`, must be a value greater than or equal to `33554430`") # noqa: E501 + self._max_queue_size_bytes = max_queue_size_bytes + + @property + def drop_non_retryable_data(self): + """Get the drop_non_retryable_data of this ReplicationCreationRequest. + + :return: The drop_non_retryable_data of this ReplicationCreationRequest. 
+ :rtype: bool + """ # noqa: E501 + return self._drop_non_retryable_data + + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data): + """Set the drop_non_retryable_data of this ReplicationCreationRequest. + + :param drop_non_retryable_data: The drop_non_retryable_data of this ReplicationCreationRequest. + :type: bool + """ # noqa: E501 + self._drop_non_retryable_data = drop_non_retryable_data + + @property + def max_age_seconds(self): + """Get the max_age_seconds of this ReplicationCreationRequest. + + :return: The max_age_seconds of this ReplicationCreationRequest. + :rtype: int + """ # noqa: E501 + return self._max_age_seconds + + @max_age_seconds.setter + def max_age_seconds(self, max_age_seconds): + """Set the max_age_seconds of this ReplicationCreationRequest. + + :param max_age_seconds: The max_age_seconds of this ReplicationCreationRequest. + :type: int + """ # noqa: E501 + if max_age_seconds is None: + raise ValueError("Invalid value for `max_age_seconds`, must not be `None`") # noqa: E501 + if max_age_seconds is not None and max_age_seconds < 0: # noqa: E501 + raise ValueError("Invalid value for `max_age_seconds`, must be a value greater than or equal to `0`") # noqa: E501 + self._max_age_seconds = max_age_seconds + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ReplicationCreationRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_update_request.py new file mode 100644 index 0000000..0c40c11 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replication_update_request.py @@ -0,0 +1,272 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ReplicationUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'remote_id': 'str', + 'remote_bucket_id': 'str', + 'remote_bucket_name': 'str', + 'max_queue_size_bytes': 'int', + 'drop_non_retryable_data': 'bool', + 'max_age_seconds': 'int' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'remote_id': 'remoteID', + 'remote_bucket_id': 'remoteBucketID', + 'remote_bucket_name': 'remoteBucketName', + 'max_queue_size_bytes': 'maxQueueSizeBytes', + 'drop_non_retryable_data': 'dropNonRetryableData', + 'max_age_seconds': 'maxAgeSeconds' + } + + def __init__(self, name=None, description=None, remote_id=None, remote_bucket_id=None, remote_bucket_name=None, max_queue_size_bytes=None, drop_non_retryable_data=None, max_age_seconds=None): # noqa: E501,D401,D403 + """ReplicationUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._remote_id = None + self._remote_bucket_id = None + self._remote_bucket_name = None + self._max_queue_size_bytes = None + self._drop_non_retryable_data = None + self._max_age_seconds = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if remote_id is not None: + self.remote_id = remote_id + if remote_bucket_id is not None: + self.remote_bucket_id = remote_bucket_id + if remote_bucket_name is not None: + self.remote_bucket_name = remote_bucket_name + if max_queue_size_bytes is not None: + self.max_queue_size_bytes = max_queue_size_bytes + if drop_non_retryable_data is not None: + self.drop_non_retryable_data = drop_non_retryable_data + if max_age_seconds is not None: + self.max_age_seconds = max_age_seconds + + @property + def name(self): + """Get the name of this ReplicationUpdateRequest. + + :return: The name of this ReplicationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this ReplicationUpdateRequest. + + :param name: The name of this ReplicationUpdateRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this ReplicationUpdateRequest. + + :return: The description of this ReplicationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this ReplicationUpdateRequest. + + :param description: The description of this ReplicationUpdateRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def remote_id(self): + """Get the remote_id of this ReplicationUpdateRequest. + + :return: The remote_id of this ReplicationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._remote_id + + @remote_id.setter + def remote_id(self, remote_id): + """Set the remote_id of this ReplicationUpdateRequest. + + :param remote_id: The remote_id of this ReplicationUpdateRequest. + :type: str + """ # noqa: E501 + self._remote_id = remote_id + + @property + def remote_bucket_id(self): + """Get the remote_bucket_id of this ReplicationUpdateRequest. + + :return: The remote_bucket_id of this ReplicationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_id + + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id): + """Set the remote_bucket_id of this ReplicationUpdateRequest. + + :param remote_bucket_id: The remote_bucket_id of this ReplicationUpdateRequest. 
+ :type: str + """ # noqa: E501 + self._remote_bucket_id = remote_bucket_id + + @property + def remote_bucket_name(self): + """Get the remote_bucket_name of this ReplicationUpdateRequest. + + :return: The remote_bucket_name of this ReplicationUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._remote_bucket_name + + @remote_bucket_name.setter + def remote_bucket_name(self, remote_bucket_name): + """Set the remote_bucket_name of this ReplicationUpdateRequest. + + :param remote_bucket_name: The remote_bucket_name of this ReplicationUpdateRequest. + :type: str + """ # noqa: E501 + self._remote_bucket_name = remote_bucket_name + + @property + def max_queue_size_bytes(self): + """Get the max_queue_size_bytes of this ReplicationUpdateRequest. + + :return: The max_queue_size_bytes of this ReplicationUpdateRequest. + :rtype: int + """ # noqa: E501 + return self._max_queue_size_bytes + + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes): + """Set the max_queue_size_bytes of this ReplicationUpdateRequest. + + :param max_queue_size_bytes: The max_queue_size_bytes of this ReplicationUpdateRequest. + :type: int + """ # noqa: E501 + if max_queue_size_bytes is not None and max_queue_size_bytes < 33554430: # noqa: E501 + raise ValueError("Invalid value for `max_queue_size_bytes`, must be a value greater than or equal to `33554430`") # noqa: E501 + self._max_queue_size_bytes = max_queue_size_bytes + + @property + def drop_non_retryable_data(self): + """Get the drop_non_retryable_data of this ReplicationUpdateRequest. + + :return: The drop_non_retryable_data of this ReplicationUpdateRequest. + :rtype: bool + """ # noqa: E501 + return self._drop_non_retryable_data + + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data): + """Set the drop_non_retryable_data of this ReplicationUpdateRequest. + + :param drop_non_retryable_data: The drop_non_retryable_data of this ReplicationUpdateRequest. + :type: bool + """ # noqa: E501 + self._drop_non_retryable_data = drop_non_retryable_data + + @property + def max_age_seconds(self): + """Get the max_age_seconds of this ReplicationUpdateRequest. + + :return: The max_age_seconds of this ReplicationUpdateRequest. + :rtype: int + """ # noqa: E501 + return self._max_age_seconds + + @max_age_seconds.setter + def max_age_seconds(self, max_age_seconds): + """Set the max_age_seconds of this ReplicationUpdateRequest. + + :param max_age_seconds: The max_age_seconds of this ReplicationUpdateRequest. 
+ :type: int + """ # noqa: E501 + if max_age_seconds is not None and max_age_seconds < 0: # noqa: E501 + raise ValueError("Invalid value for `max_age_seconds`, must be a value greater than or equal to `0`") # noqa: E501 + self._max_age_seconds = max_age_seconds + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ReplicationUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/replications.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replications.py new file mode 100644 index 0000000..d81cd45 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/replications.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Replications(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'replications': 'list[Replication]' + } + + attribute_map = { + 'replications': 'replications' + } + + def __init__(self, replications=None): # noqa: E501,D401,D403 + """Replications - a model defined in OpenAPI.""" # noqa: E501 + self._replications = None + self.discriminator = None + + if replications is not None: + self.replications = replications + + @property + def replications(self): + """Get the replications of this Replications. + + :return: The replications of this Replications. + :rtype: list[Replication] + """ # noqa: E501 + return self._replications + + @replications.setter + def replications(self, replications): + """Set the replications of this Replications. + + :param replications: The replications of this Replications. 
+ :type: list[Replication] + """ # noqa: E501 + self._replications = replications + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Replications): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_member.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_member.py new file mode 100644 index 0000000..26e26d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_member.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.user_response import UserResponse + + +class ResourceMember(UserResponse): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'role': 'str', + 'id': 'str', + 'name': 'str', + 'status': 'str', + 'links': 'UserResponseLinks' + } + + attribute_map = { + 'role': 'role', + 'id': 'id', + 'name': 'name', + 'status': 'status', + 'links': 'links' + } + + def __init__(self, role='member', id=None, name=None, status='active', links=None): # noqa: E501,D401,D403 + """ResourceMember - a model defined in OpenAPI.""" # noqa: E501 + UserResponse.__init__(self, id=id, name=name, status=status, links=links) # noqa: E501 + + self._role = None + self.discriminator = None + + if role is not None: + self.role = role + + @property + def role(self): + """Get the role of this ResourceMember. + + :return: The role of this ResourceMember. + :rtype: str + """ # noqa: E501 + return self._role + + @role.setter + def role(self, role): + """Set the role of this ResourceMember. + + :param role: The role of this ResourceMember. 
+ :type: str + """ # noqa: E501 + self._role = role + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ResourceMember): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members.py new file mode 100644 index 0000000..5fde29a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ResourceMembers(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ResourceMembersLinks', + 'users': 'list[ResourceMember]' + } + + attribute_map = { + 'links': 'links', + 'users': 'users' + } + + def __init__(self, links=None, users=None): # noqa: E501,D401,D403 + """ResourceMembers - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._users = None + self.discriminator = None + + if links is not None: + self.links = links + if users is not None: + self.users = users + + @property + def links(self): + """Get the links of this ResourceMembers. + + :return: The links of this ResourceMembers. + :rtype: ResourceMembersLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this ResourceMembers. + + :param links: The links of this ResourceMembers. + :type: ResourceMembersLinks + """ # noqa: E501 + self._links = links + + @property + def users(self): + """Get the users of this ResourceMembers. + + :return: The users of this ResourceMembers. + :rtype: list[ResourceMember] + """ # noqa: E501 + return self._users + + @users.setter + def users(self, users): + """Set the users of this ResourceMembers. + + :param users: The users of this ResourceMembers. 
+ :type: list[ResourceMember] + """ # noqa: E501 + self._users = users + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ResourceMembers): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members_links.py new file mode 100644 index 0000000..26820bb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_members_links.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ResourceMembersLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str' + } + + attribute_map = { + '_self': 'self' + } + + def __init__(self, _self=None): # noqa: E501,D401,D403 + """ResourceMembersLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self.discriminator = None + + if _self is not None: + self._self = _self + + @property + def _self(self): + """Get the _self of this ResourceMembersLinks. + + :return: The _self of this ResourceMembersLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this ResourceMembersLinks. + + :param _self: The _self of this ResourceMembersLinks. 
+ :type: str + """ # noqa: E501 + self.__self = _self + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ResourceMembersLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owner.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owner.py new file mode 100644 index 0000000..2f73cd7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owner.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.user_response import UserResponse + + +class ResourceOwner(UserResponse): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'role': 'str', + 'id': 'str', + 'name': 'str', + 'status': 'str', + 'links': 'UserResponseLinks' + } + + attribute_map = { + 'role': 'role', + 'id': 'id', + 'name': 'name', + 'status': 'status', + 'links': 'links' + } + + def __init__(self, role='owner', id=None, name=None, status='active', links=None): # noqa: E501,D401,D403 + """ResourceOwner - a model defined in OpenAPI.""" # noqa: E501 + UserResponse.__init__(self, id=id, name=name, status=status, links=links) # noqa: E501 + + self._role = None + self.discriminator = None + + if role is not None: + self.role = role + + @property + def role(self): + """Get the role of this ResourceOwner. + + :return: The role of this ResourceOwner. + :rtype: str + """ # noqa: E501 + return self._role + + @role.setter + def role(self, role): + """Set the role of this ResourceOwner. + + :param role: The role of this ResourceOwner. 
+ :type: str + """ # noqa: E501 + self._role = role + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ResourceOwner): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owners.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owners.py new file mode 100644 index 0000000..84a85ff --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/resource_owners.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ResourceOwners(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ResourceMembersLinks', + 'users': 'list[ResourceOwner]' + } + + attribute_map = { + 'links': 'links', + 'users': 'users' + } + + def __init__(self, links=None, users=None): # noqa: E501,D401,D403 + """ResourceOwners - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._users = None + self.discriminator = None + + if links is not None: + self.links = links + if users is not None: + self.users = users + + @property + def links(self): + """Get the links of this ResourceOwners. + + :return: The links of this ResourceOwners. + :rtype: ResourceMembersLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this ResourceOwners. + + :param links: The links of this ResourceOwners. + :type: ResourceMembersLinks + """ # noqa: E501 + self._links = links + + @property + def users(self): + """Get the users of this ResourceOwners. + + :return: The users of this ResourceOwners. + :rtype: list[ResourceOwner] + """ # noqa: E501 + return self._users + + @users.setter + def users(self, users): + """Set the users of this ResourceOwners. + + :param users: The users of this ResourceOwners. 
+ :type: list[ResourceOwner] + """ # noqa: E501 + self._users = users + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ResourceOwners): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/restored_bucket_mappings.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/restored_bucket_mappings.py new file mode 100644 index 0000000..2059cbc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/restored_bucket_mappings.py @@ -0,0 +1,160 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RestoredBucketMappings(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'shard_mappings': 'list[BucketShardMapping]' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'shard_mappings': 'shardMappings' + } + + def __init__(self, id=None, name=None, shard_mappings=None): # noqa: E501,D401,D403 + """RestoredBucketMappings - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._shard_mappings = None + self.discriminator = None + + self.id = id + self.name = name + self.shard_mappings = shard_mappings + + @property + def id(self): + """Get the id of this RestoredBucketMappings. + + New ID of the restored bucket + + :return: The id of this RestoredBucketMappings. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this RestoredBucketMappings. + + New ID of the restored bucket + + :param id: The id of this RestoredBucketMappings. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this RestoredBucketMappings. + + :return: The name of this RestoredBucketMappings. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this RestoredBucketMappings. 
+ + :param name: The name of this RestoredBucketMappings. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def shard_mappings(self): + """Get the shard_mappings of this RestoredBucketMappings. + + :return: The shard_mappings of this RestoredBucketMappings. + :rtype: list[BucketShardMapping] + """ # noqa: E501 + return self._shard_mappings + + @shard_mappings.setter + def shard_mappings(self, shard_mappings): + """Set the shard_mappings of this RestoredBucketMappings. + + :param shard_mappings: The shard_mappings of this RestoredBucketMappings. + :type: list[BucketShardMapping] + """ # noqa: E501 + if shard_mappings is None: + raise ValueError("Invalid value for `shard_mappings`, must not be `None`") # noqa: E501 + self._shard_mappings = shard_mappings + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RestoredBucketMappings): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/retention_policy_manifest.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/retention_policy_manifest.py new file mode 100644 index 0000000..a0b314c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/retention_policy_manifest.py @@ -0,0 +1,228 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RetentionPolicyManifest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'name': 'str', + 'replica_n': 'int', + 'duration': 'int', + 'shard_group_duration': 'int', + 'shard_groups': 'list[ShardGroupManifest]', + 'subscriptions': 'list[SubscriptionManifest]' + } + + attribute_map = { + 'name': 'name', + 'replica_n': 'replicaN', + 'duration': 'duration', + 'shard_group_duration': 'shardGroupDuration', + 'shard_groups': 'shardGroups', + 'subscriptions': 'subscriptions' + } + + def __init__(self, name=None, replica_n=None, duration=None, shard_group_duration=None, shard_groups=None, subscriptions=None): # noqa: E501,D401,D403 + """RetentionPolicyManifest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._replica_n = None + self._duration = None + self._shard_group_duration = None + self._shard_groups = None + self._subscriptions = None + self.discriminator = None + + self.name = name + self.replica_n = replica_n + self.duration = duration + self.shard_group_duration = shard_group_duration + self.shard_groups = shard_groups + self.subscriptions = subscriptions + + @property + def name(self): + """Get the name of this RetentionPolicyManifest. + + :return: The name of this RetentionPolicyManifest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this RetentionPolicyManifest. + + :param name: The name of this RetentionPolicyManifest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def replica_n(self): + """Get the replica_n of this RetentionPolicyManifest. + + :return: The replica_n of this RetentionPolicyManifest. + :rtype: int + """ # noqa: E501 + return self._replica_n + + @replica_n.setter + def replica_n(self, replica_n): + """Set the replica_n of this RetentionPolicyManifest. + + :param replica_n: The replica_n of this RetentionPolicyManifest. + :type: int + """ # noqa: E501 + if replica_n is None: + raise ValueError("Invalid value for `replica_n`, must not be `None`") # noqa: E501 + self._replica_n = replica_n + + @property + def duration(self): + """Get the duration of this RetentionPolicyManifest. + + :return: The duration of this RetentionPolicyManifest. + :rtype: int + """ # noqa: E501 + return self._duration + + @duration.setter + def duration(self, duration): + """Set the duration of this RetentionPolicyManifest. + + :param duration: The duration of this RetentionPolicyManifest. + :type: int + """ # noqa: E501 + if duration is None: + raise ValueError("Invalid value for `duration`, must not be `None`") # noqa: E501 + self._duration = duration + + @property + def shard_group_duration(self): + """Get the shard_group_duration of this RetentionPolicyManifest. + + :return: The shard_group_duration of this RetentionPolicyManifest. + :rtype: int + """ # noqa: E501 + return self._shard_group_duration + + @shard_group_duration.setter + def shard_group_duration(self, shard_group_duration): + """Set the shard_group_duration of this RetentionPolicyManifest. + + :param shard_group_duration: The shard_group_duration of this RetentionPolicyManifest. + :type: int + """ # noqa: E501 + if shard_group_duration is None: + raise ValueError("Invalid value for `shard_group_duration`, must not be `None`") # noqa: E501 + self._shard_group_duration = shard_group_duration + + @property + def shard_groups(self): + """Get the shard_groups of this RetentionPolicyManifest. + + :return: The shard_groups of this RetentionPolicyManifest. 
+ :rtype: list[ShardGroupManifest] + """ # noqa: E501 + return self._shard_groups + + @shard_groups.setter + def shard_groups(self, shard_groups): + """Set the shard_groups of this RetentionPolicyManifest. + + :param shard_groups: The shard_groups of this RetentionPolicyManifest. + :type: list[ShardGroupManifest] + """ # noqa: E501 + if shard_groups is None: + raise ValueError("Invalid value for `shard_groups`, must not be `None`") # noqa: E501 + self._shard_groups = shard_groups + + @property + def subscriptions(self): + """Get the subscriptions of this RetentionPolicyManifest. + + :return: The subscriptions of this RetentionPolicyManifest. + :rtype: list[SubscriptionManifest] + """ # noqa: E501 + return self._subscriptions + + @subscriptions.setter + def subscriptions(self, subscriptions): + """Set the subscriptions of this RetentionPolicyManifest. + + :param subscriptions: The subscriptions of this RetentionPolicyManifest. + :type: list[SubscriptionManifest] + """ # noqa: E501 + if subscriptions is None: + raise ValueError("Invalid value for `subscriptions`, must not be `None`") # noqa: E501 + self._subscriptions = subscriptions + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RetentionPolicyManifest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/return_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/return_statement.py new file mode 100644 index 0000000..aa4b48d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/return_statement.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class ReturnStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'argument': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'argument': 'argument' + } + + def __init__(self, type=None, argument=None): # noqa: E501,D401,D403 + """ReturnStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._argument = None + self.discriminator = None + + if type is not None: + self.type = type + if argument is not None: + self.argument = argument + + @property + def type(self): + """Get the type of this ReturnStatement. + + Type of AST node + + :return: The type of this ReturnStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ReturnStatement. + + Type of AST node + + :param type: The type of this ReturnStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def argument(self): + """Get the argument of this ReturnStatement. + + :return: The argument of this ReturnStatement. + :rtype: Expression + """ # noqa: E501 + return self._argument + + @argument.setter + def argument(self, argument): + """Set the argument of this ReturnStatement. + + :param argument: The argument of this ReturnStatement. + :type: Expression + """ # noqa: E501 + self._argument = argument + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ReturnStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes.py new file mode 100644 index 0000000..2a7a77e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes.py @@ -0,0 +1,498 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Routes(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'authorizations': 'str', + 'buckets': 'str', + 'dashboards': 'str', + 'external': 'RoutesExternal', + 'variables': 'str', + 'me': 'str', + 'flags': 'str', + 'orgs': 'str', + 'query': 'RoutesQuery', + 'setup': 'str', + 'signin': 'str', + 'signout': 'str', + 'sources': 'str', + 'system': 'RoutesSystem', + 'tasks': 'str', + 'telegrafs': 'str', + 'users': 'str', + 'write': 'str' + } + + attribute_map = { + 'authorizations': 'authorizations', + 'buckets': 'buckets', + 'dashboards': 'dashboards', + 'external': 'external', + 'variables': 'variables', + 'me': 'me', + 'flags': 'flags', + 'orgs': 'orgs', + 'query': 'query', + 'setup': 'setup', + 'signin': 'signin', + 'signout': 'signout', + 'sources': 'sources', + 'system': 'system', + 'tasks': 'tasks', + 'telegrafs': 'telegrafs', + 'users': 'users', + 'write': 'write' + } + + def __init__(self, authorizations=None, buckets=None, dashboards=None, external=None, variables=None, me=None, flags=None, orgs=None, query=None, setup=None, signin=None, signout=None, sources=None, system=None, tasks=None, telegrafs=None, users=None, write=None): # noqa: E501,D401,D403 + """Routes - a model defined in OpenAPI.""" # noqa: E501 + self._authorizations = None + self._buckets = None + self._dashboards = None + self._external = None + self._variables = None + self._me = None + self._flags = None + self._orgs = None + self._query = None + self._setup = None + self._signin = None + self._signout = None + self._sources = None + self._system = None + self._tasks = None + self._telegrafs = None + self._users = None + self._write = None + self.discriminator = None + + if authorizations is not None: + self.authorizations = authorizations + if buckets is not None: + self.buckets = buckets + if dashboards is not None: + self.dashboards = dashboards + if external is not None: + self.external = external + if variables is not None: + self.variables = variables + if me is not None: + self.me = me + if flags is not None: + self.flags = flags + if orgs is not None: + self.orgs = orgs + if query is not None: + self.query = query + if setup is not None: + self.setup = setup + if signin is not None: + self.signin = signin + if signout is not None: + self.signout = signout + if sources is not None: + self.sources = sources + if system is not None: + self.system = system + if tasks is not None: + self.tasks = tasks + if telegrafs is not None: + self.telegrafs = telegrafs + if users is not None: + self.users = users + if write is not None: + self.write = write + + @property + def authorizations(self): + """Get the authorizations of this Routes. + + :return: The authorizations of this Routes. + :rtype: str + """ # noqa: E501 + return self._authorizations + + @authorizations.setter + def authorizations(self, authorizations): + """Set the authorizations of this Routes. + + :param authorizations: The authorizations of this Routes. + :type: str + """ # noqa: E501 + self._authorizations = authorizations + + @property + def buckets(self): + """Get the buckets of this Routes. + + :return: The buckets of this Routes. + :rtype: str + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this Routes. + + :param buckets: The buckets of this Routes. + :type: str + """ # noqa: E501 + self._buckets = buckets + + @property + def dashboards(self): + """Get the dashboards of this Routes. + + :return: The dashboards of this Routes. 
+ :rtype: str + """ # noqa: E501 + return self._dashboards + + @dashboards.setter + def dashboards(self, dashboards): + """Set the dashboards of this Routes. + + :param dashboards: The dashboards of this Routes. + :type: str + """ # noqa: E501 + self._dashboards = dashboards + + @property + def external(self): + """Get the external of this Routes. + + :return: The external of this Routes. + :rtype: RoutesExternal + """ # noqa: E501 + return self._external + + @external.setter + def external(self, external): + """Set the external of this Routes. + + :param external: The external of this Routes. + :type: RoutesExternal + """ # noqa: E501 + self._external = external + + @property + def variables(self): + """Get the variables of this Routes. + + :return: The variables of this Routes. + :rtype: str + """ # noqa: E501 + return self._variables + + @variables.setter + def variables(self, variables): + """Set the variables of this Routes. + + :param variables: The variables of this Routes. + :type: str + """ # noqa: E501 + self._variables = variables + + @property + def me(self): + """Get the me of this Routes. + + :return: The me of this Routes. + :rtype: str + """ # noqa: E501 + return self._me + + @me.setter + def me(self, me): + """Set the me of this Routes. + + :param me: The me of this Routes. + :type: str + """ # noqa: E501 + self._me = me + + @property + def flags(self): + """Get the flags of this Routes. + + :return: The flags of this Routes. + :rtype: str + """ # noqa: E501 + return self._flags + + @flags.setter + def flags(self, flags): + """Set the flags of this Routes. + + :param flags: The flags of this Routes. + :type: str + """ # noqa: E501 + self._flags = flags + + @property + def orgs(self): + """Get the orgs of this Routes. + + :return: The orgs of this Routes. + :rtype: str + """ # noqa: E501 + return self._orgs + + @orgs.setter + def orgs(self, orgs): + """Set the orgs of this Routes. + + :param orgs: The orgs of this Routes. + :type: str + """ # noqa: E501 + self._orgs = orgs + + @property + def query(self): + """Get the query of this Routes. + + :return: The query of this Routes. + :rtype: RoutesQuery + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this Routes. + + :param query: The query of this Routes. + :type: RoutesQuery + """ # noqa: E501 + self._query = query + + @property + def setup(self): + """Get the setup of this Routes. + + :return: The setup of this Routes. + :rtype: str + """ # noqa: E501 + return self._setup + + @setup.setter + def setup(self, setup): + """Set the setup of this Routes. + + :param setup: The setup of this Routes. + :type: str + """ # noqa: E501 + self._setup = setup + + @property + def signin(self): + """Get the signin of this Routes. + + :return: The signin of this Routes. + :rtype: str + """ # noqa: E501 + return self._signin + + @signin.setter + def signin(self, signin): + """Set the signin of this Routes. + + :param signin: The signin of this Routes. + :type: str + """ # noqa: E501 + self._signin = signin + + @property + def signout(self): + """Get the signout of this Routes. + + :return: The signout of this Routes. + :rtype: str + """ # noqa: E501 + return self._signout + + @signout.setter + def signout(self, signout): + """Set the signout of this Routes. + + :param signout: The signout of this Routes. + :type: str + """ # noqa: E501 + self._signout = signout + + @property + def sources(self): + """Get the sources of this Routes. + + :return: The sources of this Routes. 
+ :rtype: str + """ # noqa: E501 + return self._sources + + @sources.setter + def sources(self, sources): + """Set the sources of this Routes. + + :param sources: The sources of this Routes. + :type: str + """ # noqa: E501 + self._sources = sources + + @property + def system(self): + """Get the system of this Routes. + + :return: The system of this Routes. + :rtype: RoutesSystem + """ # noqa: E501 + return self._system + + @system.setter + def system(self, system): + """Set the system of this Routes. + + :param system: The system of this Routes. + :type: RoutesSystem + """ # noqa: E501 + self._system = system + + @property + def tasks(self): + """Get the tasks of this Routes. + + :return: The tasks of this Routes. + :rtype: str + """ # noqa: E501 + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Set the tasks of this Routes. + + :param tasks: The tasks of this Routes. + :type: str + """ # noqa: E501 + self._tasks = tasks + + @property + def telegrafs(self): + """Get the telegrafs of this Routes. + + :return: The telegrafs of this Routes. + :rtype: str + """ # noqa: E501 + return self._telegrafs + + @telegrafs.setter + def telegrafs(self, telegrafs): + """Set the telegrafs of this Routes. + + :param telegrafs: The telegrafs of this Routes. + :type: str + """ # noqa: E501 + self._telegrafs = telegrafs + + @property + def users(self): + """Get the users of this Routes. + + :return: The users of this Routes. + :rtype: str + """ # noqa: E501 + return self._users + + @users.setter + def users(self, users): + """Set the users of this Routes. + + :param users: The users of this Routes. + :type: str + """ # noqa: E501 + self._users = users + + @property + def write(self): + """Get the write of this Routes. + + :return: The write of this Routes. + :rtype: str + """ # noqa: E501 + return self._write + + @write.setter + def write(self, write): + """Set the write of this Routes. + + :param write: The write of this Routes. + :type: str + """ # noqa: E501 + self._write = write + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Routes): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_external.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_external.py new file mode 100644 index 0000000..87c49f9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_external.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. 
+Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+import pprint
+import re # noqa: F401
+
+
+class RoutesExternal(object):
+ """NOTE: This class is auto generated by OpenAPI Generator.
+
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ """
+ Attributes:
+ openapi_types (dict): The key is attribute name
+ and the value is attribute type.
+ attribute_map (dict): The key is attribute name
+ and the value is json key in definition.
+ """
+ openapi_types = {
+ 'status_feed': 'str'
+ }
+
+ attribute_map = {
+ 'status_feed': 'statusFeed'
+ }
+
+ def __init__(self, status_feed=None): # noqa: E501,D401,D403
+ """RoutesExternal - a model defined in OpenAPI.""" # noqa: E501
+ self._status_feed = None
+ self.discriminator = None
+
+ if status_feed is not None:
+ self.status_feed = status_feed
+
+ @property
+ def status_feed(self):
+ """Get the status_feed of this RoutesExternal.
+
+ :return: The status_feed of this RoutesExternal.
+ :rtype: str
+ """ # noqa: E501
+ return self._status_feed
+
+ @status_feed.setter
+ def status_feed(self, status_feed):
+ """Set the status_feed of this RoutesExternal.
+
+ :param status_feed: The status_feed of this RoutesExternal.
+ :type: str
+ """ # noqa: E501
+ self._status_feed = status_feed
+
+ def to_dict(self):
+ """Return the model properties as a dict."""
+ result = {}
+
+ for attr, _ in self.openapi_types.items():
+ value = getattr(self, attr)
+ if isinstance(value, list):
+ result[attr] = list(map(
+ lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
+ value
+ ))
+ elif hasattr(value, "to_dict"):
+ result[attr] = value.to_dict()
+ elif isinstance(value, dict):
+ result[attr] = dict(map(
+ lambda item: (item[0], item[1].to_dict())
+ if hasattr(item[1], "to_dict") else item,
+ value.items()
+ ))
+ else:
+ result[attr] = value
+
+ return result
+
+ def to_str(self):
+ """Return the string representation of the model."""
+ return pprint.pformat(self.to_dict())
+
+ def __repr__(self):
+ """For `print` and `pprint`."""
+ return self.to_str()
+
+ def __eq__(self, other):
+ """Return true if both objects are equal."""
+ if not isinstance(other, RoutesExternal):
+ return False
+
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ """Return true if both objects are not equal."""
+ return not self == other
diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_query.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_query.py
new file mode 100644
index 0000000..82ae34e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_query.py
@@ -0,0 +1,176 @@
+# coding: utf-8
+
+"""
+InfluxDB OSS API Service.
+
+The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+import pprint
+import re # noqa: F401
+
+
+class RoutesQuery(object):
+ """NOTE: This class is auto generated by OpenAPI Generator.
+
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ """
+ Attributes:
+ openapi_types (dict): The key is attribute name
+ and the value is attribute type.
+ attribute_map (dict): The key is attribute name
+ and the value is json key in definition.
+ """ + openapi_types = { + '_self': 'str', + 'ast': 'str', + 'analyze': 'str', + 'suggestions': 'str' + } + + attribute_map = { + '_self': 'self', + 'ast': 'ast', + 'analyze': 'analyze', + 'suggestions': 'suggestions' + } + + def __init__(self, _self=None, ast=None, analyze=None, suggestions=None): # noqa: E501,D401,D403 + """RoutesQuery - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._ast = None + self._analyze = None + self._suggestions = None + self.discriminator = None + + if _self is not None: + self._self = _self + if ast is not None: + self.ast = ast + if analyze is not None: + self.analyze = analyze + if suggestions is not None: + self.suggestions = suggestions + + @property + def _self(self): + """Get the _self of this RoutesQuery. + + :return: The _self of this RoutesQuery. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this RoutesQuery. + + :param _self: The _self of this RoutesQuery. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def ast(self): + """Get the ast of this RoutesQuery. + + :return: The ast of this RoutesQuery. + :rtype: str + """ # noqa: E501 + return self._ast + + @ast.setter + def ast(self, ast): + """Set the ast of this RoutesQuery. + + :param ast: The ast of this RoutesQuery. + :type: str + """ # noqa: E501 + self._ast = ast + + @property + def analyze(self): + """Get the analyze of this RoutesQuery. + + :return: The analyze of this RoutesQuery. + :rtype: str + """ # noqa: E501 + return self._analyze + + @analyze.setter + def analyze(self, analyze): + """Set the analyze of this RoutesQuery. + + :param analyze: The analyze of this RoutesQuery. + :type: str + """ # noqa: E501 + self._analyze = analyze + + @property + def suggestions(self): + """Get the suggestions of this RoutesQuery. + + :return: The suggestions of this RoutesQuery. + :rtype: str + """ # noqa: E501 + return self._suggestions + + @suggestions.setter + def suggestions(self, suggestions): + """Set the suggestions of this RoutesQuery. + + :param suggestions: The suggestions of this RoutesQuery. 
+ :type: str + """ # noqa: E501 + self._suggestions = suggestions + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RoutesQuery): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_system.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_system.py new file mode 100644 index 0000000..73ac677 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/routes_system.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RoutesSystem(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'metrics': 'str', + 'debug': 'str', + 'health': 'str' + } + + attribute_map = { + 'metrics': 'metrics', + 'debug': 'debug', + 'health': 'health' + } + + def __init__(self, metrics=None, debug=None, health=None): # noqa: E501,D401,D403 + """RoutesSystem - a model defined in OpenAPI.""" # noqa: E501 + self._metrics = None + self._debug = None + self._health = None + self.discriminator = None + + if metrics is not None: + self.metrics = metrics + if debug is not None: + self.debug = debug + if health is not None: + self.health = health + + @property + def metrics(self): + """Get the metrics of this RoutesSystem. + + :return: The metrics of this RoutesSystem. + :rtype: str + """ # noqa: E501 + return self._metrics + + @metrics.setter + def metrics(self, metrics): + """Set the metrics of this RoutesSystem. + + :param metrics: The metrics of this RoutesSystem. + :type: str + """ # noqa: E501 + self._metrics = metrics + + @property + def debug(self): + """Get the debug of this RoutesSystem. + + :return: The debug of this RoutesSystem. + :rtype: str + """ # noqa: E501 + return self._debug + + @debug.setter + def debug(self, debug): + """Set the debug of this RoutesSystem. + + :param debug: The debug of this RoutesSystem. + :type: str + """ # noqa: E501 + self._debug = debug + + @property + def health(self): + """Get the health of this RoutesSystem. 
+ + :return: The health of this RoutesSystem. + :rtype: str + """ # noqa: E501 + return self._health + + @health.setter + def health(self, health): + """Set the health of this RoutesSystem. + + :param health: The health of this RoutesSystem. + :type: str + """ # noqa: E501 + self._health = health + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RoutesSystem): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/rule_status_level.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/rule_status_level.py new file mode 100644 index 0000000..9d9f424 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/rule_status_level.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RuleStatusLevel(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNKNOWN = "UNKNOWN" + OK = "OK" + INFO = "INFO" + CRIT = "CRIT" + WARN = "WARN" + ANY = "ANY" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """RuleStatusLevel - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RuleStatusLevel): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/run.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run.py new file mode 100644 index 0000000..32634ac --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run.py @@ -0,0 +1,338 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Run(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'task_id': 'str', + 'status': 'str', + 'scheduled_for': 'datetime', + 'log': 'list[LogEvent]', + 'flux': 'str', + 'started_at': 'datetime', + 'finished_at': 'datetime', + 'requested_at': 'datetime', + 'links': 'RunLinks' + } + + attribute_map = { + 'id': 'id', + 'task_id': 'taskID', + 'status': 'status', + 'scheduled_for': 'scheduledFor', + 'log': 'log', + 'flux': 'flux', + 'started_at': 'startedAt', + 'finished_at': 'finishedAt', + 'requested_at': 'requestedAt', + 'links': 'links' + } + + def __init__(self, id=None, task_id=None, status=None, scheduled_for=None, log=None, flux=None, started_at=None, finished_at=None, requested_at=None, links=None): # noqa: E501,D401,D403 + """Run - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._task_id = None + self._status = None + self._scheduled_for = None + self._log = None + self._flux = None + self._started_at = None + self._finished_at = None + self._requested_at = None + self._links = None + self.discriminator = None + + if id is not None: + self.id = id + if task_id is not None: + self.task_id = task_id + if status is not None: + self.status = status + if scheduled_for is not None: + self.scheduled_for = scheduled_for + if log is not None: + self.log = log + if flux is not None: + self.flux = flux + if started_at is not None: + self.started_at = started_at + if finished_at is not None: + self.finished_at = finished_at + if requested_at is not None: + self.requested_at = requested_at + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this Run. + + :return: The id of this Run. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Run. + + :param id: The id of this Run. + :type: str + """ # noqa: E501 + self._id = id + + @property + def task_id(self): + """Get the task_id of this Run. + + :return: The task_id of this Run. + :rtype: str + """ # noqa: E501 + return self._task_id + + @task_id.setter + def task_id(self, task_id): + """Set the task_id of this Run. + + :param task_id: The task_id of this Run. + :type: str + """ # noqa: E501 + self._task_id = task_id + + @property + def status(self): + """Get the status of this Run. + + :return: The status of this Run. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this Run. + + :param status: The status of this Run. + :type: str + """ # noqa: E501 + self._status = status + + @property + def scheduled_for(self): + """Get the scheduled_for of this Run. + + The time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) used for the run's `now` option. + + :return: The scheduled_for of this Run. + :rtype: datetime + """ # noqa: E501 + return self._scheduled_for + + @scheduled_for.setter + def scheduled_for(self, scheduled_for): + """Set the scheduled_for of this Run. + + The time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) used for the run's `now` option. + + :param scheduled_for: The scheduled_for of this Run. + :type: datetime + """ # noqa: E501 + self._scheduled_for = scheduled_for + + @property + def log(self): + """Get the log of this Run. + + An array of logs associated with the run. + + :return: The log of this Run. + :rtype: list[LogEvent] + """ # noqa: E501 + return self._log + + @log.setter + def log(self, log): + """Set the log of this Run. 
+ + An array of logs associated with the run. + + :param log: The log of this Run. + :type: list[LogEvent] + """ # noqa: E501 + self._log = log + + @property + def flux(self): + """Get the flux of this Run. + + Flux used for the task + + :return: The flux of this Run. + :rtype: str + """ # noqa: E501 + return self._flux + + @flux.setter + def flux(self, flux): + """Set the flux of this Run. + + Flux used for the task + + :param flux: The flux of this Run. + :type: str + """ # noqa: E501 + self._flux = flux + + @property + def started_at(self): + """Get the started_at of this Run. + + The time ([RFC3339Nano date/time format](https://go.dev/src/time/format.go)) the run started executing. + + :return: The started_at of this Run. + :rtype: datetime + """ # noqa: E501 + return self._started_at + + @started_at.setter + def started_at(self, started_at): + """Set the started_at of this Run. + + The time ([RFC3339Nano date/time format](https://go.dev/src/time/format.go)) the run started executing. + + :param started_at: The started_at of this Run. + :type: datetime + """ # noqa: E501 + self._started_at = started_at + + @property + def finished_at(self): + """Get the finished_at of this Run. + + The time ([RFC3339Nano date/time format](https://go.dev/src/time/format.go)) the run finished executing. + + :return: The finished_at of this Run. + :rtype: datetime + """ # noqa: E501 + return self._finished_at + + @finished_at.setter + def finished_at(self, finished_at): + """Set the finished_at of this Run. + + The time ([RFC3339Nano date/time format](https://go.dev/src/time/format.go)) the run finished executing. + + :param finished_at: The finished_at of this Run. + :type: datetime + """ # noqa: E501 + self._finished_at = finished_at + + @property + def requested_at(self): + """Get the requested_at of this Run. + + The time ([RFC3339Nano date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339nano-timestamp)) the run was manually requested. + + :return: The requested_at of this Run. + :rtype: datetime + """ # noqa: E501 + return self._requested_at + + @requested_at.setter + def requested_at(self, requested_at): + """Set the requested_at of this Run. + + The time ([RFC3339Nano date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339nano-timestamp)) the run was manually requested. + + :param requested_at: The requested_at of this Run. + :type: datetime + """ # noqa: E501 + self._requested_at = requested_at + + @property + def links(self): + """Get the links of this Run. + + :return: The links of this Run. + :rtype: RunLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Run. + + :param links: The links of this Run. 
+ :type: RunLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Run): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_links.py new file mode 100644 index 0000000..08c0533 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_links.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RunLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'task': 'str', + 'retry': 'str' + } + + attribute_map = { + '_self': 'self', + 'task': 'task', + 'retry': 'retry' + } + + def __init__(self, _self=None, task=None, retry=None): # noqa: E501,D401,D403 + """RunLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._task = None + self._retry = None + self.discriminator = None + + if _self is not None: + self._self = _self + if task is not None: + self.task = task + if retry is not None: + self.retry = retry + + @property + def _self(self): + """Get the _self of this RunLinks. + + :return: The _self of this RunLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this RunLinks. + + :param _self: The _self of this RunLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def task(self): + """Get the task of this RunLinks. + + :return: The task of this RunLinks. + :rtype: str + """ # noqa: E501 + return self._task + + @task.setter + def task(self, task): + """Set the task of this RunLinks. + + :param task: The task of this RunLinks. + :type: str + """ # noqa: E501 + self._task = task + + @property + def retry(self): + """Get the retry of this RunLinks. + + :return: The retry of this RunLinks. 
+ :rtype: str + """ # noqa: E501 + return self._retry + + @retry.setter + def retry(self, retry): + """Set the retry of this RunLinks. + + :param retry: The retry of this RunLinks. + :type: str + """ # noqa: E501 + self._retry = retry + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RunLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_manually.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_manually.py new file mode 100644 index 0000000..c326083 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/run_manually.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class RunManually(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'scheduled_for': 'datetime' + } + + attribute_map = { + 'scheduled_for': 'scheduledFor' + } + + def __init__(self, scheduled_for=None): # noqa: E501,D401,D403 + """RunManually - a model defined in OpenAPI.""" # noqa: E501 + self._scheduled_for = None + self.discriminator = None + + self.scheduled_for = scheduled_for + + @property + def scheduled_for(self): + """Get the scheduled_for of this RunManually. + + The time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) used for the run's `now` option. Default is the server _now_ time. + + :return: The scheduled_for of this RunManually. + :rtype: datetime + """ # noqa: E501 + return self._scheduled_for + + @scheduled_for.setter + def scheduled_for(self, scheduled_for): + """Set the scheduled_for of this RunManually. + + The time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) used for the run's `now` option. Default is the server _now_ time. + + :param scheduled_for: The scheduled_for of this RunManually. 
+ :type: datetime + """ # noqa: E501 + self._scheduled_for = scheduled_for + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, RunManually): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/runs.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/runs.py new file mode 100644 index 0000000..5f92630 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/runs.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Runs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'Links', + 'runs': 'list[Run]' + } + + attribute_map = { + 'links': 'links', + 'runs': 'runs' + } + + def __init__(self, links=None, runs=None): # noqa: E501,D401,D403 + """Runs - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._runs = None + self.discriminator = None + + if links is not None: + self.links = links + if runs is not None: + self.runs = runs + + @property + def links(self): + """Get the links of this Runs. + + :return: The links of this Runs. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Runs. + + :param links: The links of this Runs. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def runs(self): + """Get the runs of this Runs. + + :return: The runs of this Runs. + :rtype: list[Run] + """ # noqa: E501 + return self._runs + + @runs.setter + def runs(self, runs): + """Set the runs of this Runs. + + :param runs: The runs of this Runs. 
+ :type: list[Run] + """ # noqa: E501 + self._runs = runs + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Runs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/scatter_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scatter_view_properties.py new file mode 100644 index 0000000..783a87c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scatter_view_properties.py @@ -0,0 +1,850 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class ScatterViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[str]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_column': 'str', + 'generate_y_axis_ticks': 'list[str]', + 'y_total_ticks': 'int', + 'y_tick_start': 'float', + 'y_tick_step': 'float', + 'fill_columns': 'list[str]', + 'symbol_columns': 'list[str]', + 'x_domain': 'list[float]', + 'y_domain': 'list[float]', + 'x_axis_label': 'str', + 'y_axis_label': 'str', + 'x_prefix': 'str', + 'x_suffix': 'str', + 'y_prefix': 'str', + 'y_suffix': 'str', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_column': 'yColumn', + 'generate_y_axis_ticks': 'generateYAxisTicks', + 'y_total_ticks': 'yTotalTicks', + 'y_tick_start': 'yTickStart', + 'y_tick_step': 'yTickStep', + 'fill_columns': 'fillColumns', + 'symbol_columns': 'symbolColumns', + 'x_domain': 'xDomain', + 'y_domain': 'yDomain', + 'x_axis_label': 'xAxisLabel', + 'y_axis_label': 'yAxisLabel', + 'x_prefix': 'xPrefix', + 'x_suffix': 'xSuffix', + 'y_prefix': 'yPrefix', + 'y_suffix': 'ySuffix', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, time_format=None, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_column=None, generate_y_axis_ticks=None, y_total_ticks=None, y_tick_start=None, y_tick_step=None, fill_columns=None, symbol_columns=None, x_domain=None, y_domain=None, x_axis_label=None, y_axis_label=None, x_prefix=None, x_suffix=None, y_prefix=None, y_suffix=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """ScatterViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_column = None + self._generate_y_axis_ticks = None + self._y_total_ticks = None + self._y_tick_start = None + self._y_tick_step = None + self._fill_columns = None + self._symbol_columns = None + self._x_domain = None + self._y_domain = None + self._x_axis_label = None + self._y_axis_label = None + self._x_prefix = None + self._x_suffix = None + self._y_prefix = None + self._y_suffix = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity 
= None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide = adaptive_zoom_hide + if time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + self.y_column = y_column + if generate_y_axis_ticks is not None: + self.generate_y_axis_ticks = generate_y_axis_ticks + if y_total_ticks is not None: + self.y_total_ticks = y_total_ticks + if y_tick_start is not None: + self.y_tick_start = y_tick_start + if y_tick_step is not None: + self.y_tick_step = y_tick_step + self.fill_columns = fill_columns + self.symbol_columns = symbol_columns + self.x_domain = x_domain + self.y_domain = y_domain + self.x_axis_label = x_axis_label + self.y_axis_label = y_axis_label + self.x_prefix = x_prefix + self.x_suffix = x_suffix + self.y_prefix = y_prefix + self.y_suffix = y_suffix + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this ScatterViewProperties. + + :return: The adaptive_zoom_hide of this ScatterViewProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this ScatterViewProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this ScatterViewProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def time_format(self): + """Get the time_format of this ScatterViewProperties. + + :return: The time_format of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this ScatterViewProperties. + + :param time_format: The time_format of this ScatterViewProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this ScatterViewProperties. + + :return: The type of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ScatterViewProperties. + + :param type: The type of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this ScatterViewProperties. + + :return: The queries of this ScatterViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this ScatterViewProperties. 
+ + :param queries: The queries of this ScatterViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this ScatterViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this ScatterViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this ScatterViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this ScatterViewProperties. + :type: list[str] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this ScatterViewProperties. + + :return: The shape of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this ScatterViewProperties. + + :param shape: The shape of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this ScatterViewProperties. + + :return: The note of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this ScatterViewProperties. + + :param note: The note of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this ScatterViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this ScatterViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this ScatterViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this ScatterViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def x_column(self): + """Get the x_column of this ScatterViewProperties. + + :return: The x_column of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this ScatterViewProperties. + + :param x_column: The x_column of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if x_column is None: + raise ValueError("Invalid value for `x_column`, must not be `None`") # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this ScatterViewProperties. + + :return: The generate_x_axis_ticks of this ScatterViewProperties. 
+ :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this ScatterViewProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this ScatterViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this ScatterViewProperties. + + :return: The x_total_ticks of this ScatterViewProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this ScatterViewProperties. + + :param x_total_ticks: The x_total_ticks of this ScatterViewProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this ScatterViewProperties. + + :return: The x_tick_start of this ScatterViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this ScatterViewProperties. + + :param x_tick_start: The x_tick_start of this ScatterViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this ScatterViewProperties. + + :return: The x_tick_step of this ScatterViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this ScatterViewProperties. + + :param x_tick_step: The x_tick_step of this ScatterViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_column(self): + """Get the y_column of this ScatterViewProperties. + + :return: The y_column of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_column + + @y_column.setter + def y_column(self, y_column): + """Set the y_column of this ScatterViewProperties. + + :param y_column: The y_column of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if y_column is None: + raise ValueError("Invalid value for `y_column`, must not be `None`") # noqa: E501 + self._y_column = y_column + + @property + def generate_y_axis_ticks(self): + """Get the generate_y_axis_ticks of this ScatterViewProperties. + + :return: The generate_y_axis_ticks of this ScatterViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_y_axis_ticks + + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks): + """Set the generate_y_axis_ticks of this ScatterViewProperties. + + :param generate_y_axis_ticks: The generate_y_axis_ticks of this ScatterViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_y_axis_ticks = generate_y_axis_ticks + + @property + def y_total_ticks(self): + """Get the y_total_ticks of this ScatterViewProperties. + + :return: The y_total_ticks of this ScatterViewProperties. + :rtype: int + """ # noqa: E501 + return self._y_total_ticks + + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks): + """Set the y_total_ticks of this ScatterViewProperties. + + :param y_total_ticks: The y_total_ticks of this ScatterViewProperties. 
+ :type: int + """ # noqa: E501 + self._y_total_ticks = y_total_ticks + + @property + def y_tick_start(self): + """Get the y_tick_start of this ScatterViewProperties. + + :return: The y_tick_start of this ScatterViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_start + + @y_tick_start.setter + def y_tick_start(self, y_tick_start): + """Set the y_tick_start of this ScatterViewProperties. + + :param y_tick_start: The y_tick_start of this ScatterViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_start = y_tick_start + + @property + def y_tick_step(self): + """Get the y_tick_step of this ScatterViewProperties. + + :return: The y_tick_step of this ScatterViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_step + + @y_tick_step.setter + def y_tick_step(self, y_tick_step): + """Set the y_tick_step of this ScatterViewProperties. + + :param y_tick_step: The y_tick_step of this ScatterViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_step = y_tick_step + + @property + def fill_columns(self): + """Get the fill_columns of this ScatterViewProperties. + + :return: The fill_columns of this ScatterViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._fill_columns + + @fill_columns.setter + def fill_columns(self, fill_columns): + """Set the fill_columns of this ScatterViewProperties. + + :param fill_columns: The fill_columns of this ScatterViewProperties. + :type: list[str] + """ # noqa: E501 + if fill_columns is None: + raise ValueError("Invalid value for `fill_columns`, must not be `None`") # noqa: E501 + self._fill_columns = fill_columns + + @property + def symbol_columns(self): + """Get the symbol_columns of this ScatterViewProperties. + + :return: The symbol_columns of this ScatterViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._symbol_columns + + @symbol_columns.setter + def symbol_columns(self, symbol_columns): + """Set the symbol_columns of this ScatterViewProperties. + + :param symbol_columns: The symbol_columns of this ScatterViewProperties. + :type: list[str] + """ # noqa: E501 + if symbol_columns is None: + raise ValueError("Invalid value for `symbol_columns`, must not be `None`") # noqa: E501 + self._symbol_columns = symbol_columns + + @property + def x_domain(self): + """Get the x_domain of this ScatterViewProperties. + + :return: The x_domain of this ScatterViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._x_domain + + @x_domain.setter + def x_domain(self, x_domain): + """Set the x_domain of this ScatterViewProperties. + + :param x_domain: The x_domain of this ScatterViewProperties. + :type: list[float] + """ # noqa: E501 + if x_domain is None: + raise ValueError("Invalid value for `x_domain`, must not be `None`") # noqa: E501 + self._x_domain = x_domain + + @property + def y_domain(self): + """Get the y_domain of this ScatterViewProperties. + + :return: The y_domain of this ScatterViewProperties. + :rtype: list[float] + """ # noqa: E501 + return self._y_domain + + @y_domain.setter + def y_domain(self, y_domain): + """Set the y_domain of this ScatterViewProperties. + + :param y_domain: The y_domain of this ScatterViewProperties. + :type: list[float] + """ # noqa: E501 + if y_domain is None: + raise ValueError("Invalid value for `y_domain`, must not be `None`") # noqa: E501 + self._y_domain = y_domain + + @property + def x_axis_label(self): + """Get the x_axis_label of this ScatterViewProperties. + + :return: The x_axis_label of this ScatterViewProperties. 
+ :rtype: str + """ # noqa: E501 + return self._x_axis_label + + @x_axis_label.setter + def x_axis_label(self, x_axis_label): + """Set the x_axis_label of this ScatterViewProperties. + + :param x_axis_label: The x_axis_label of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if x_axis_label is None: + raise ValueError("Invalid value for `x_axis_label`, must not be `None`") # noqa: E501 + self._x_axis_label = x_axis_label + + @property + def y_axis_label(self): + """Get the y_axis_label of this ScatterViewProperties. + + :return: The y_axis_label of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_axis_label + + @y_axis_label.setter + def y_axis_label(self, y_axis_label): + """Set the y_axis_label of this ScatterViewProperties. + + :param y_axis_label: The y_axis_label of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if y_axis_label is None: + raise ValueError("Invalid value for `y_axis_label`, must not be `None`") # noqa: E501 + self._y_axis_label = y_axis_label + + @property + def x_prefix(self): + """Get the x_prefix of this ScatterViewProperties. + + :return: The x_prefix of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_prefix + + @x_prefix.setter + def x_prefix(self, x_prefix): + """Set the x_prefix of this ScatterViewProperties. + + :param x_prefix: The x_prefix of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if x_prefix is None: + raise ValueError("Invalid value for `x_prefix`, must not be `None`") # noqa: E501 + self._x_prefix = x_prefix + + @property + def x_suffix(self): + """Get the x_suffix of this ScatterViewProperties. + + :return: The x_suffix of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_suffix + + @x_suffix.setter + def x_suffix(self, x_suffix): + """Set the x_suffix of this ScatterViewProperties. + + :param x_suffix: The x_suffix of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if x_suffix is None: + raise ValueError("Invalid value for `x_suffix`, must not be `None`") # noqa: E501 + self._x_suffix = x_suffix + + @property + def y_prefix(self): + """Get the y_prefix of this ScatterViewProperties. + + :return: The y_prefix of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_prefix + + @y_prefix.setter + def y_prefix(self, y_prefix): + """Set the y_prefix of this ScatterViewProperties. + + :param y_prefix: The y_prefix of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if y_prefix is None: + raise ValueError("Invalid value for `y_prefix`, must not be `None`") # noqa: E501 + self._y_prefix = y_prefix + + @property + def y_suffix(self): + """Get the y_suffix of this ScatterViewProperties. + + :return: The y_suffix of this ScatterViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_suffix + + @y_suffix.setter + def y_suffix(self, y_suffix): + """Set the y_suffix of this ScatterViewProperties. + + :param y_suffix: The y_suffix of this ScatterViewProperties. + :type: str + """ # noqa: E501 + if y_suffix is None: + raise ValueError("Invalid value for `y_suffix`, must not be `None`") # noqa: E501 + self._y_suffix = y_suffix + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this ScatterViewProperties. + + :return: The legend_colorize_rows of this ScatterViewProperties. 
+ :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this ScatterViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this ScatterViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this ScatterViewProperties. + + :return: The legend_hide of this ScatterViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this ScatterViewProperties. + + :param legend_hide: The legend_hide of this ScatterViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this ScatterViewProperties. + + :return: The legend_opacity of this ScatterViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this ScatterViewProperties. + + :param legend_opacity: The legend_opacity of this ScatterViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this ScatterViewProperties. + + :return: The legend_orientation_threshold of this ScatterViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this ScatterViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this ScatterViewProperties. + :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScatterViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/schema_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/schema_type.py new file mode 100644 index 0000000..c8612b5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/schema_type.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class SchemaType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + IMPLICIT = "implicit" + EXPLICIT = "explicit" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """SchemaType - a model defined in OpenAPI.""" # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SchemaType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_request.py new file mode 100644 index 0000000..ae96067 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_request.py @@ -0,0 +1,246 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScraperTargetRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition.
+ """ + openapi_types = { + 'name': 'str', + 'type': 'str', + 'url': 'str', + 'org_id': 'str', + 'bucket_id': 'str', + 'allow_insecure': 'bool' + } + + attribute_map = { + 'name': 'name', + 'type': 'type', + 'url': 'url', + 'org_id': 'orgID', + 'bucket_id': 'bucketID', + 'allow_insecure': 'allowInsecure' + } + + def __init__(self, name=None, type=None, url=None, org_id=None, bucket_id=None, allow_insecure=False): # noqa: E501,D401,D403 + """ScraperTargetRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._type = None + self._url = None + self._org_id = None + self._bucket_id = None + self._allow_insecure = None + self.discriminator = None + + if name is not None: + self.name = name + if type is not None: + self.type = type + if url is not None: + self.url = url + if org_id is not None: + self.org_id = org_id + if bucket_id is not None: + self.bucket_id = bucket_id + if allow_insecure is not None: + self.allow_insecure = allow_insecure + + @property + def name(self): + """Get the name of this ScraperTargetRequest. + + The name of the scraper target. + + :return: The name of this ScraperTargetRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this ScraperTargetRequest. + + The name of the scraper target. + + :param name: The name of this ScraperTargetRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def type(self): + """Get the type of this ScraperTargetRequest. + + The type of the metrics to be parsed. + + :return: The type of this ScraperTargetRequest. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ScraperTargetRequest. + + The type of the metrics to be parsed. + + :param type: The type of this ScraperTargetRequest. + :type: str + """ # noqa: E501 + self._type = type + + @property + def url(self): + """Get the url of this ScraperTargetRequest. + + The URL of the metrics endpoint. + + :return: The url of this ScraperTargetRequest. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this ScraperTargetRequest. + + The URL of the metrics endpoint. + + :param url: The url of this ScraperTargetRequest. + :type: str + """ # noqa: E501 + self._url = url + + @property + def org_id(self): + """Get the org_id of this ScraperTargetRequest. + + The organization ID. + + :return: The org_id of this ScraperTargetRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this ScraperTargetRequest. + + The organization ID. + + :param org_id: The org_id of this ScraperTargetRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def bucket_id(self): + """Get the bucket_id of this ScraperTargetRequest. + + The ID of the bucket to write to. + + :return: The bucket_id of this ScraperTargetRequest. + :rtype: str + """ # noqa: E501 + return self._bucket_id + + @bucket_id.setter + def bucket_id(self, bucket_id): + """Set the bucket_id of this ScraperTargetRequest. + + The ID of the bucket to write to. + + :param bucket_id: The bucket_id of this ScraperTargetRequest. + :type: str + """ # noqa: E501 + self._bucket_id = bucket_id + + @property + def allow_insecure(self): + """Get the allow_insecure of this ScraperTargetRequest. + + Skip TLS verification on endpoint. + + :return: The allow_insecure of this ScraperTargetRequest. 
+ :rtype: bool + """ # noqa: E501 + return self._allow_insecure + + @allow_insecure.setter + def allow_insecure(self, allow_insecure): + """Set the allow_insecure of this ScraperTargetRequest. + + Skip TLS verification on endpoint. + + :param allow_insecure: The allow_insecure of this ScraperTargetRequest. + :type: bool + """ # noqa: E501 + self._allow_insecure = allow_insecure + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScraperTargetRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_response.py new file mode 100644 index 0000000..8f40ae0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_response.py @@ -0,0 +1,200 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.scraper_target_request import ScraperTargetRequest + + +class ScraperTargetResponse(ScraperTargetRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'org': 'str', + 'bucket': 'str', + 'links': 'object', + 'name': 'str', + 'type': 'str', + 'url': 'str', + 'org_id': 'str', + 'bucket_id': 'str', + 'allow_insecure': 'bool' + } + + attribute_map = { + 'id': 'id', + 'org': 'org', + 'bucket': 'bucket', + 'links': 'links', + 'name': 'name', + 'type': 'type', + 'url': 'url', + 'org_id': 'orgID', + 'bucket_id': 'bucketID', + 'allow_insecure': 'allowInsecure' + } + + def __init__(self, id=None, org=None, bucket=None, links=None, name=None, type=None, url=None, org_id=None, bucket_id=None, allow_insecure=False): # noqa: E501,D401,D403 + """ScraperTargetResponse - a model defined in OpenAPI.""" # noqa: E501 + ScraperTargetRequest.__init__(self, name=name, type=type, url=url, org_id=org_id, bucket_id=bucket_id, allow_insecure=allow_insecure) # noqa: E501 + + self._id = None + self._org = None + self._bucket = None + self._links = None + self.discriminator = None + + if id is not None: + self.id = id + if org is not None: + self.org = org + if bucket is not None: + self.bucket = bucket + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this ScraperTargetResponse. + + :return: The id of this ScraperTargetResponse. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this ScraperTargetResponse. + + :param id: The id of this ScraperTargetResponse. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org(self): + """Get the org of this ScraperTargetResponse. + + The name of the organization. + + :return: The org of this ScraperTargetResponse. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this ScraperTargetResponse. + + The name of the organization. + + :param org: The org of this ScraperTargetResponse. + :type: str + """ # noqa: E501 + self._org = org + + @property + def bucket(self): + """Get the bucket of this ScraperTargetResponse. + + The bucket name. + + :return: The bucket of this ScraperTargetResponse. + :rtype: str + """ # noqa: E501 + return self._bucket + + @bucket.setter + def bucket(self, bucket): + """Set the bucket of this ScraperTargetResponse. + + The bucket name. + + :param bucket: The bucket of this ScraperTargetResponse. + :type: str + """ # noqa: E501 + self._bucket = bucket + + @property + def links(self): + """Get the links of this ScraperTargetResponse. + + :return: The links of this ScraperTargetResponse. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this ScraperTargetResponse. + + :param links: The links of this ScraperTargetResponse. 
+ :type: object + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScraperTargetResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_responses.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_responses.py new file mode 100644 index 0000000..7e4e477 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scraper_target_responses.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScraperTargetResponses(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'configurations': 'list[ScraperTargetResponse]' + } + + attribute_map = { + 'configurations': 'configurations' + } + + def __init__(self, configurations=None): # noqa: E501,D401,D403 + """ScraperTargetResponses - a model defined in OpenAPI.""" # noqa: E501 + self._configurations = None + self.discriminator = None + + if configurations is not None: + self.configurations = configurations + + @property + def configurations(self): + """Get the configurations of this ScraperTargetResponses. + + :return: The configurations of this ScraperTargetResponses. + :rtype: list[ScraperTargetResponse] + """ # noqa: E501 + return self._configurations + + @configurations.setter + def configurations(self, configurations): + """Set the configurations of this ScraperTargetResponses. + + :param configurations: The configurations of this ScraperTargetResponses. 
+ :type: list[ScraperTargetResponse] + """ # noqa: E501 + self._configurations = configurations + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScraperTargetResponses): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/script.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script.py new file mode 100644 index 0000000..ea2296f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script.py @@ -0,0 +1,302 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Script(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'description': 'str', + 'org_id': 'str', + 'script': 'str', + 'language': 'ScriptLanguage', + 'url': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'description': 'description', + 'org_id': 'orgID', + 'script': 'script', + 'language': 'language', + 'url': 'url', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt' + } + + def __init__(self, id=None, name=None, description=None, org_id=None, script=None, language=None, url=None, created_at=None, updated_at=None): # noqa: E501,D401,D403 + """Script - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._description = None + self._org_id = None + self._script = None + self._language = None + self._url = None + self._created_at = None + self._updated_at = None + self.discriminator = None + + if id is not None: + self.id = id + self.name = name + if description is not None: + self.description = description + self.org_id = org_id + self.script = script + if language is not None: + self.language = language + if url is not None: + self.url = url + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + + @property + def id(self): + """Get the id of this Script. 
+ + :return: The id of this Script. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Script. + + :param id: The id of this Script. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this Script. + + :return: The name of this Script. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Script. + + :param name: The name of this Script. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this Script. + + :return: The description of this Script. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Script. + + :param description: The description of this Script. + :type: str + """ # noqa: E501 + self._description = description + + @property + def org_id(self): + """Get the org_id of this Script. + + :return: The org_id of this Script. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Script. + + :param org_id: The org_id of this Script. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def script(self): + """Get the script of this Script. + + The script to execute. + + :return: The script of this Script. + :rtype: str + """ # noqa: E501 + return self._script + + @script.setter + def script(self, script): + """Set the script of this Script. + + The script to execute. + + :param script: The script of this Script. + :type: str + """ # noqa: E501 + if script is None: + raise ValueError("Invalid value for `script`, must not be `None`") # noqa: E501 + self._script = script + + @property + def language(self): + """Get the language of this Script. + + :return: The language of this Script. + :rtype: ScriptLanguage + """ # noqa: E501 + return self._language + + @language.setter + def language(self, language): + """Set the language of this Script. + + :param language: The language of this Script. + :type: ScriptLanguage + """ # noqa: E501 + self._language = language + + @property + def url(self): + """Get the url of this Script. + + The invocation endpoint address. + + :return: The url of this Script. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this Script. + + The invocation endpoint address. + + :param url: The url of this Script. + :type: str + """ # noqa: E501 + self._url = url + + @property + def created_at(self): + """Get the created_at of this Script. + + :return: The created_at of this Script. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Script. + + :param created_at: The created_at of this Script. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Script. + + :return: The updated_at of this Script. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Script. 
+ + :param updated_at: The updated_at of this Script. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Script): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_create_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_create_request.py new file mode 100644 index 0000000..5af2b5d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_create_request.py @@ -0,0 +1,192 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScriptCreateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'script': 'str', + 'language': 'ScriptLanguage' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'script': 'script', + 'language': 'language' + } + + def __init__(self, name=None, description=None, script=None, language=None): # noqa: E501,D401,D403 + """ScriptCreateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._script = None + self._language = None + self.discriminator = None + + self.name = name + self.description = description + self.script = script + self.language = language + + @property + def name(self): + """Get the name of this ScriptCreateRequest. + + Script name. The name must be unique within the organization. + + :return: The name of this ScriptCreateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this ScriptCreateRequest. + + Script name. The name must be unique within the organization. + + :param name: The name of this ScriptCreateRequest. 
+ :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this ScriptCreateRequest. + + Script description. A description of the script. + + :return: The description of this ScriptCreateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this ScriptCreateRequest. + + Script description. A description of the script. + + :param description: The description of this ScriptCreateRequest. + :type: str + """ # noqa: E501 + if description is None: + raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + self._description = description + + @property + def script(self): + """Get the script of this ScriptCreateRequest. + + The script to execute. + + :return: The script of this ScriptCreateRequest. + :rtype: str + """ # noqa: E501 + return self._script + + @script.setter + def script(self, script): + """Set the script of this ScriptCreateRequest. + + The script to execute. + + :param script: The script of this ScriptCreateRequest. + :type: str + """ # noqa: E501 + if script is None: + raise ValueError("Invalid value for `script`, must not be `None`") # noqa: E501 + self._script = script + + @property + def language(self): + """Get the language of this ScriptCreateRequest. + + :return: The language of this ScriptCreateRequest. + :rtype: ScriptLanguage + """ # noqa: E501 + return self._language + + @language.setter + def language(self, language): + """Set the language of this ScriptCreateRequest. + + :param language: The language of this ScriptCreateRequest. + :type: ScriptLanguage + """ # noqa: E501 + if language is None: + raise ValueError("Invalid value for `language`, must not be `None`") # noqa: E501 + self._language = language + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScriptCreateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_invocation_params.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_invocation_params.py new file mode 100644 index 0000000..1849280 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_invocation_params.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. 
Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScriptInvocationParams(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'params': 'dict(str, object)' + } + + attribute_map = { + 'params': 'params' + } + + def __init__(self, params=None): # noqa: E501,D401,D403 + """ScriptInvocationParams - a model defined in OpenAPI.""" # noqa: E501 + self._params = None + self.discriminator = None + + if params is not None: + self.params = params + + @property + def params(self): + """Get the params of this ScriptInvocationParams. + + The script parameters. `params` contains key-value pairs that map values to the **params.keys** in a script. When you invoke a script with `params`, InfluxDB passes the values as invocation parameters to the script. + + :return: The params of this ScriptInvocationParams. + :rtype: dict(str, object) + """ # noqa: E501 + return self._params + + @params.setter + def params(self, params): + """Set the params of this ScriptInvocationParams. + + The script parameters. `params` contains key-value pairs that map values to the **params.keys** in a script. When you invoke a script with `params`, InfluxDB passes the values as invocation parameters to the script. + + :param params: The params of this ScriptInvocationParams. + :type: dict(str, object) + """ # noqa: E501 + self._params = params + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScriptInvocationParams): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_language.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_language.py new file mode 100644 index 0000000..473055e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_language.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScriptLanguage(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + FLUX = "flux" + SQL = "sql" + INFLUXQL = "influxql" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """ScriptLanguage - a model defined in OpenAPI.""" # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScriptLanguage): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_update_request.py new file mode 100644 index 0000000..36444c7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/script_update_request.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ScriptUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'description': 'str', + 'script': 'str' + } + + attribute_map = { + 'description': 'description', + 'script': 'script' + } + + def __init__(self, description=None, script=None): # noqa: E501,D401,D403 + """ScriptUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._description = None + self._script = None + self.discriminator = None + + if description is not None: + self.description = description + if script is not None: + self.script = script + + @property + def description(self): + """Get the description of this ScriptUpdateRequest. + + A description of the script. 
+ + :return: The description of this ScriptUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this ScriptUpdateRequest. + + A description of the script. + + :param description: The description of this ScriptUpdateRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def script(self): + """Get the script of this ScriptUpdateRequest. + + The script to execute. + + :return: The script of this ScriptUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._script + + @script.setter + def script(self, script): + """Set the script of this ScriptUpdateRequest. + + The script to execute. + + :param script: The script of this ScriptUpdateRequest. + :type: str + """ # noqa: E501 + self._script = script + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ScriptUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/scripts.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scripts.py new file mode 100644 index 0000000..03ca492 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/scripts.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Scripts(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'scripts': 'list[Script]' + } + + attribute_map = { + 'scripts': 'scripts' + } + + def __init__(self, scripts=None): # noqa: E501,D401,D403 + """Scripts - a model defined in OpenAPI.""" # noqa: E501 + self._scripts = None + self.discriminator = None + + if scripts is not None: + self.scripts = scripts + + @property + def scripts(self): + """Get the scripts of this Scripts. + + :return: The scripts of this Scripts. + :rtype: list[Script] + """ # noqa: E501 + return self._scripts + + @scripts.setter + def scripts(self, scripts): + """Set the scripts of this Scripts. 
+ + :param scripts: The scripts of this Scripts. + :type: list[Script] + """ # noqa: E501 + self._scripts = scripts + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Scripts): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys.py new file mode 100644 index 0000000..b935a61 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class SecretKeys(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'secrets': 'list[str]' + } + + attribute_map = { + 'secrets': 'secrets' + } + + def __init__(self, secrets=None): # noqa: E501,D401,D403 + """SecretKeys - a model defined in OpenAPI.""" # noqa: E501 + self._secrets = None + self.discriminator = None + + if secrets is not None: + self.secrets = secrets + + @property + def secrets(self): + """Get the secrets of this SecretKeys. + + :return: The secrets of this SecretKeys. + :rtype: list[str] + """ # noqa: E501 + return self._secrets + + @secrets.setter + def secrets(self, secrets): + """Set the secrets of this SecretKeys. + + :param secrets: The secrets of this SecretKeys. 
+ :type: list[str] + """ # noqa: E501 + self._secrets = secrets + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SecretKeys): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys_response.py new file mode 100644 index 0000000..c9686cf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/secret_keys_response.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.secret_keys import SecretKeys + + +class SecretKeysResponse(SecretKeys): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'object', + 'secrets': 'list[str]' + } + + attribute_map = { + 'links': 'links', + 'secrets': 'secrets' + } + + def __init__(self, links=None, secrets=None): # noqa: E501,D401,D403 + """SecretKeysResponse - a model defined in OpenAPI.""" # noqa: E501 + SecretKeys.__init__(self, secrets=secrets) # noqa: E501 + + self._links = None + self.discriminator = None + + if links is not None: + self.links = links + + @property + def links(self): + """Get the links of this SecretKeysResponse. + + :return: The links of this SecretKeysResponse. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this SecretKeysResponse. + + :param links: The links of this SecretKeysResponse. 
+ :type: object + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SecretKeysResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_group_manifest.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_group_manifest.py new file mode 100644 index 0000000..cd64444 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_group_manifest.py @@ -0,0 +1,226 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ShardGroupManifest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'int', + 'start_time': 'datetime', + 'end_time': 'datetime', + 'deleted_at': 'datetime', + 'truncated_at': 'datetime', + 'shards': 'list[ShardManifest]' + } + + attribute_map = { + 'id': 'id', + 'start_time': 'startTime', + 'end_time': 'endTime', + 'deleted_at': 'deletedAt', + 'truncated_at': 'truncatedAt', + 'shards': 'shards' + } + + def __init__(self, id=None, start_time=None, end_time=None, deleted_at=None, truncated_at=None, shards=None): # noqa: E501,D401,D403 + """ShardGroupManifest - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._start_time = None + self._end_time = None + self._deleted_at = None + self._truncated_at = None + self._shards = None + self.discriminator = None + + self.id = id + self.start_time = start_time + self.end_time = end_time + if deleted_at is not None: + self.deleted_at = deleted_at + if truncated_at is not None: + self.truncated_at = truncated_at + self.shards = shards + + @property + def id(self): + """Get the id of this ShardGroupManifest. + + :return: The id of this ShardGroupManifest. + :rtype: int + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this ShardGroupManifest. + + :param id: The id of this ShardGroupManifest. 
+ :type: int + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def start_time(self): + """Get the start_time of this ShardGroupManifest. + + :return: The start_time of this ShardGroupManifest. + :rtype: datetime + """ # noqa: E501 + return self._start_time + + @start_time.setter + def start_time(self, start_time): + """Set the start_time of this ShardGroupManifest. + + :param start_time: The start_time of this ShardGroupManifest. + :type: datetime + """ # noqa: E501 + if start_time is None: + raise ValueError("Invalid value for `start_time`, must not be `None`") # noqa: E501 + self._start_time = start_time + + @property + def end_time(self): + """Get the end_time of this ShardGroupManifest. + + :return: The end_time of this ShardGroupManifest. + :rtype: datetime + """ # noqa: E501 + return self._end_time + + @end_time.setter + def end_time(self, end_time): + """Set the end_time of this ShardGroupManifest. + + :param end_time: The end_time of this ShardGroupManifest. + :type: datetime + """ # noqa: E501 + if end_time is None: + raise ValueError("Invalid value for `end_time`, must not be `None`") # noqa: E501 + self._end_time = end_time + + @property + def deleted_at(self): + """Get the deleted_at of this ShardGroupManifest. + + :return: The deleted_at of this ShardGroupManifest. + :rtype: datetime + """ # noqa: E501 + return self._deleted_at + + @deleted_at.setter + def deleted_at(self, deleted_at): + """Set the deleted_at of this ShardGroupManifest. + + :param deleted_at: The deleted_at of this ShardGroupManifest. + :type: datetime + """ # noqa: E501 + self._deleted_at = deleted_at + + @property + def truncated_at(self): + """Get the truncated_at of this ShardGroupManifest. + + :return: The truncated_at of this ShardGroupManifest. + :rtype: datetime + """ # noqa: E501 + return self._truncated_at + + @truncated_at.setter + def truncated_at(self, truncated_at): + """Set the truncated_at of this ShardGroupManifest. + + :param truncated_at: The truncated_at of this ShardGroupManifest. + :type: datetime + """ # noqa: E501 + self._truncated_at = truncated_at + + @property + def shards(self): + """Get the shards of this ShardGroupManifest. + + :return: The shards of this ShardGroupManifest. + :rtype: list[ShardManifest] + """ # noqa: E501 + return self._shards + + @shards.setter + def shards(self, shards): + """Set the shards of this ShardGroupManifest. + + :param shards: The shards of this ShardGroupManifest. 
+ :type: list[ShardManifest] + """ # noqa: E501 + if shards is None: + raise ValueError("Invalid value for `shards`, must not be `None`") # noqa: E501 + self._shards = shards + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ShardGroupManifest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_manifest.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_manifest.py new file mode 100644 index 0000000..1d18346 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_manifest.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ShardManifest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'int', + 'shard_owners': 'list[ShardOwner]' + } + + attribute_map = { + 'id': 'id', + 'shard_owners': 'shardOwners' + } + + def __init__(self, id=None, shard_owners=None): # noqa: E501,D401,D403 + """ShardManifest - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._shard_owners = None + self.discriminator = None + + self.id = id + self.shard_owners = shard_owners + + @property + def id(self): + """Get the id of this ShardManifest. + + :return: The id of this ShardManifest. + :rtype: int + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this ShardManifest. + + :param id: The id of this ShardManifest. + :type: int + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def shard_owners(self): + """Get the shard_owners of this ShardManifest. + + :return: The shard_owners of this ShardManifest. + :rtype: list[ShardOwner] + """ # noqa: E501 + return self._shard_owners + + @shard_owners.setter + def shard_owners(self, shard_owners): + """Set the shard_owners of this ShardManifest. + + :param shard_owners: The shard_owners of this ShardManifest. 
+ :type: list[ShardOwner] + """ # noqa: E501 + if shard_owners is None: + raise ValueError("Invalid value for `shard_owners`, must not be `None`") # noqa: E501 + self._shard_owners = shard_owners + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ShardManifest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_owner.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_owner.py new file mode 100644 index 0000000..3c8232a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/shard_owner.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ShardOwner(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'node_id': 'int' + } + + attribute_map = { + 'node_id': 'nodeID' + } + + def __init__(self, node_id=None): # noqa: E501,D401,D403 + """ShardOwner - a model defined in OpenAPI.""" # noqa: E501 + self._node_id = None + self.discriminator = None + + self.node_id = node_id + + @property + def node_id(self): + """Get the node_id of this ShardOwner. + + The ID of the node that owns the shard. + + :return: The node_id of this ShardOwner. + :rtype: int + """ # noqa: E501 + return self._node_id + + @node_id.setter + def node_id(self, node_id): + """Set the node_id of this ShardOwner. + + The ID of the node that owns the shard. + + :param node_id: The node_id of this ShardOwner. 
+ :type: int + """ # noqa: E501 + if node_id is None: + raise ValueError("Invalid value for `node_id`, must not be `None`") # noqa: E501 + self._node_id = node_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ShardOwner): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/simple_table_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/simple_table_view_properties.py new file mode 100644 index 0000000..670be20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/simple_table_view_properties.py @@ -0,0 +1,236 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class SimpleTableViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'show_all': 'bool', + 'queries': 'list[DashboardQuery]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool' + } + + attribute_map = { + 'type': 'type', + 'show_all': 'showAll', + 'queries': 'queries', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty' + } + + def __init__(self, type=None, show_all=None, queries=None, shape=None, note=None, show_note_when_empty=None): # noqa: E501,D401,D403 + """SimpleTableViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._show_all = None + self._queries = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self.discriminator = None + + self.type = type + self.show_all = show_all + self.queries = queries + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + + @property + def type(self): + """Get the type of this SimpleTableViewProperties. + + :return: The type of this SimpleTableViewProperties. 
+ :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this SimpleTableViewProperties. + + :param type: The type of this SimpleTableViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def show_all(self): + """Get the show_all of this SimpleTableViewProperties. + + :return: The show_all of this SimpleTableViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_all + + @show_all.setter + def show_all(self, show_all): + """Set the show_all of this SimpleTableViewProperties. + + :param show_all: The show_all of this SimpleTableViewProperties. + :type: bool + """ # noqa: E501 + if show_all is None: + raise ValueError("Invalid value for `show_all`, must not be `None`") # noqa: E501 + self._show_all = show_all + + @property + def queries(self): + """Get the queries of this SimpleTableViewProperties. + + :return: The queries of this SimpleTableViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this SimpleTableViewProperties. + + :param queries: The queries of this SimpleTableViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def shape(self): + """Get the shape of this SimpleTableViewProperties. + + :return: The shape of this SimpleTableViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this SimpleTableViewProperties. + + :param shape: The shape of this SimpleTableViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this SimpleTableViewProperties. + + :return: The note of this SimpleTableViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this SimpleTableViewProperties. + + :param note: The note of this SimpleTableViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this SimpleTableViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this SimpleTableViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this SimpleTableViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this SimpleTableViewProperties. 
+ :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SimpleTableViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/single_stat_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/single_stat_view_properties.py new file mode 100644 index 0000000..cc46240 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/single_stat_view_properties.py @@ -0,0 +1,383 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class SingleStatViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
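+
+ Example (editorial sketch, not emitted by the generator; the
+ DecimalPlaces parameter names and the "single-stat"/"chronograf-v2"
+ values are assumptions inferred from this model's attribute maps).
+ Every field below except static_legend is validated as required, so
+ all of them must be passed at construction time:
+
+ from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties
+ from influxdb_client.domain.decimal_places import DecimalPlaces
+
+ props = SingleStatViewProperties(
+     type="single-stat",
+     queries=[],                      # list[DashboardQuery]
+     colors=[],                       # list[DashboardColor]
+     shape="chronograf-v2",
+     note="", show_note_when_empty=False,
+     prefix="", tick_prefix="", suffix="", tick_suffix="",
+     decimal_places=DecimalPlaces(is_enforced=True, digits=2))
+ print(props.to_dict()["type"])       # -> single-stat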
+ """ + openapi_types = { + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'prefix': 'str', + 'tick_prefix': 'str', + 'suffix': 'str', + 'tick_suffix': 'str', + 'static_legend': 'StaticLegend', + 'decimal_places': 'DecimalPlaces' + } + + attribute_map = { + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'prefix': 'prefix', + 'tick_prefix': 'tickPrefix', + 'suffix': 'suffix', + 'tick_suffix': 'tickSuffix', + 'static_legend': 'staticLegend', + 'decimal_places': 'decimalPlaces' + } + + def __init__(self, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, prefix=None, tick_prefix=None, suffix=None, tick_suffix=None, static_legend=None, decimal_places=None): # noqa: E501,D401,D403 + """SingleStatViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._prefix = None + self._tick_prefix = None + self._suffix = None + self._tick_suffix = None + self._static_legend = None + self._decimal_places = None + self.discriminator = None + + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.prefix = prefix + self.tick_prefix = tick_prefix + self.suffix = suffix + self.tick_suffix = tick_suffix + if static_legend is not None: + self.static_legend = static_legend + self.decimal_places = decimal_places + + @property + def type(self): + """Get the type of this SingleStatViewProperties. + + :return: The type of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this SingleStatViewProperties. + + :param type: The type of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this SingleStatViewProperties. + + :return: The queries of this SingleStatViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this SingleStatViewProperties. + + :param queries: The queries of this SingleStatViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this SingleStatViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this SingleStatViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this SingleStatViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this SingleStatViewProperties. 
+ :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this SingleStatViewProperties. + + :return: The shape of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this SingleStatViewProperties. + + :param shape: The shape of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this SingleStatViewProperties. + + :return: The note of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this SingleStatViewProperties. + + :param note: The note of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this SingleStatViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this SingleStatViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this SingleStatViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this SingleStatViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def prefix(self): + """Get the prefix of this SingleStatViewProperties. + + :return: The prefix of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._prefix + + @prefix.setter + def prefix(self, prefix): + """Set the prefix of this SingleStatViewProperties. + + :param prefix: The prefix of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if prefix is None: + raise ValueError("Invalid value for `prefix`, must not be `None`") # noqa: E501 + self._prefix = prefix + + @property + def tick_prefix(self): + """Get the tick_prefix of this SingleStatViewProperties. + + :return: The tick_prefix of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._tick_prefix + + @tick_prefix.setter + def tick_prefix(self, tick_prefix): + """Set the tick_prefix of this SingleStatViewProperties. + + :param tick_prefix: The tick_prefix of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if tick_prefix is None: + raise ValueError("Invalid value for `tick_prefix`, must not be `None`") # noqa: E501 + self._tick_prefix = tick_prefix + + @property + def suffix(self): + """Get the suffix of this SingleStatViewProperties. + + :return: The suffix of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._suffix + + @suffix.setter + def suffix(self, suffix): + """Set the suffix of this SingleStatViewProperties. + + :param suffix: The suffix of this SingleStatViewProperties. 
+ :type: str + """ # noqa: E501 + if suffix is None: + raise ValueError("Invalid value for `suffix`, must not be `None`") # noqa: E501 + self._suffix = suffix + + @property + def tick_suffix(self): + """Get the tick_suffix of this SingleStatViewProperties. + + :return: The tick_suffix of this SingleStatViewProperties. + :rtype: str + """ # noqa: E501 + return self._tick_suffix + + @tick_suffix.setter + def tick_suffix(self, tick_suffix): + """Set the tick_suffix of this SingleStatViewProperties. + + :param tick_suffix: The tick_suffix of this SingleStatViewProperties. + :type: str + """ # noqa: E501 + if tick_suffix is None: + raise ValueError("Invalid value for `tick_suffix`, must not be `None`") # noqa: E501 + self._tick_suffix = tick_suffix + + @property + def static_legend(self): + """Get the static_legend of this SingleStatViewProperties. + + :return: The static_legend of this SingleStatViewProperties. + :rtype: StaticLegend + """ # noqa: E501 + return self._static_legend + + @static_legend.setter + def static_legend(self, static_legend): + """Set the static_legend of this SingleStatViewProperties. + + :param static_legend: The static_legend of this SingleStatViewProperties. + :type: StaticLegend + """ # noqa: E501 + self._static_legend = static_legend + + @property + def decimal_places(self): + """Get the decimal_places of this SingleStatViewProperties. + + :return: The decimal_places of this SingleStatViewProperties. + :rtype: DecimalPlaces + """ # noqa: E501 + return self._decimal_places + + @decimal_places.setter + def decimal_places(self, decimal_places): + """Set the decimal_places of this SingleStatViewProperties. + + :param decimal_places: The decimal_places of this SingleStatViewProperties. + :type: DecimalPlaces + """ # noqa: E501 + if decimal_places is None: + raise ValueError("Invalid value for `decimal_places`, must not be `None`") # noqa: E501 + self._decimal_places = decimal_places + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SingleStatViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_endpoint.py new file mode 100644 index 0000000..d619a4c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_endpoint.py @@ -0,0 +1,164 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. 
Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + + +class SlackNotificationEndpoint(NotificationEndpointDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'url': 'str', + 'token': 'str', + 'id': 'str', + 'org_id': 'str', + 'user_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'description': 'str', + 'name': 'str', + 'status': 'str', + 'labels': 'list[Label]', + 'links': 'NotificationEndpointBaseLinks', + 'type': 'NotificationEndpointType' + } + + attribute_map = { + 'url': 'url', + 'token': 'token', + 'id': 'id', + 'org_id': 'orgID', + 'user_id': 'userID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'description': 'description', + 'name': 'name', + 'status': 'status', + 'labels': 'labels', + 'links': 'links', + 'type': 'type' + } + + def __init__(self, url=None, token=None, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type="slack"): # noqa: E501,D401,D403 + """SlackNotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501 + NotificationEndpointDiscriminator.__init__(self, id=id, org_id=org_id, user_id=user_id, created_at=created_at, updated_at=updated_at, description=description, name=name, status=status, labels=labels, links=links, type=type) # noqa: E501 + + self._url = None + self._token = None + self.discriminator = None + + if url is not None: + self.url = url + if token is not None: + self.token = token + + @property + def url(self): + """Get the url of this SlackNotificationEndpoint. + + Specifies the URL of the Slack endpoint. Specify either `URL` or `Token`. + + :return: The url of this SlackNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this SlackNotificationEndpoint. + + Specifies the URL of the Slack endpoint. Specify either `URL` or `Token`. + + :param url: The url of this SlackNotificationEndpoint. + :type: str + """ # noqa: E501 + self._url = url + + @property + def token(self): + """Get the token of this SlackNotificationEndpoint. + + Specifies the API token string. Specify either `URL` or `Token`. + + :return: The token of this SlackNotificationEndpoint. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this SlackNotificationEndpoint. + + Specifies the API token string. Specify either `URL` or `Token`. + + :param token: The token of this SlackNotificationEndpoint. 
+ :type: str + """ # noqa: E501 + self._token = token + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SlackNotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule.py new file mode 100644 index 0000000..c7cd5b2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase + + +class SlackNotificationRule(SlackNotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
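+
+ Example (editorial sketch; the placeholder ids and the exact set of
+ base-class fields shown are assumptions drawn from the client's
+ documented usage, not from this file -- the Slack-specific fields are
+ channel and message_template):
+
+ from influxdb_client.domain.slack_notification_rule import SlackNotificationRule
+ from influxdb_client.domain.status_rule import StatusRule
+ from influxdb_client.domain.rule_status_level import RuleStatusLevel
+ from influxdb_client.domain.task_status_type import TaskStatusType
+
+ rule = SlackNotificationRule(
+     name="CPU check -> Slack",
+     every="10s", offset="0s",
+     status=TaskStatusType.ACTIVE,
+     endpoint_id="<endpoint-id>", org_id="<org-id>",   # hypothetical ids
+     status_rules=[StatusRule(current_level=RuleStatusLevel.CRIT)],
+     tag_rules=[],
+     channel="#alerts",
+     message_template="Check ${ r._check_name } is ${ r._level }")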
+ """ + openapi_types = { + 'type': 'str', + 'channel': 'str', + 'message_template': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'channel': 'channel', + 'message_template': 'messageTemplate', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="slack", channel=None, message_template=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """SlackNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + SlackNotificationRuleBase.__init__(self, type=type, channel=channel, message_template=message_template, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SlackNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return 
true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule_base.py new file mode 100644 index 0000000..923a5df --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/slack_notification_rule_base.py @@ -0,0 +1,205 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + + +class SlackNotificationRuleBase(NotificationRuleDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'channel': 'str', + 'message_template': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'channel': 'channel', + 'message_template': 'messageTemplate', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type=None, channel=None, message_template=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """SlackNotificationRuleBase - a model defined in OpenAPI.""" # noqa: E501 + NotificationRuleDiscriminator.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, 
every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + + self._type = None + self._channel = None + self._message_template = None + self.discriminator = None + + self.type = type + if channel is not None: + self.channel = channel + self.message_template = message_template + + @property + def type(self): + """Get the type of this SlackNotificationRuleBase. + + :return: The type of this SlackNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this SlackNotificationRuleBase. + + :param type: The type of this SlackNotificationRuleBase. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def channel(self): + """Get the channel of this SlackNotificationRuleBase. + + :return: The channel of this SlackNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._channel + + @channel.setter + def channel(self, channel): + """Set the channel of this SlackNotificationRuleBase. + + :param channel: The channel of this SlackNotificationRuleBase. + :type: str + """ # noqa: E501 + self._channel = channel + + @property + def message_template(self): + """Get the message_template of this SlackNotificationRuleBase. + + :return: The message_template of this SlackNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._message_template + + @message_template.setter + def message_template(self, message_template): + """Set the message_template of this SlackNotificationRuleBase. + + :param message_template: The message_template of this SlackNotificationRuleBase. + :type: str + """ # noqa: E501 + if message_template is None: + raise ValueError("Invalid value for `message_template`, must not be `None`") # noqa: E501 + self._message_template = message_template + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SlackNotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule.py new file mode 100644 index 0000000..d910732 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase + + +class SMTPNotificationRule(SMTPNotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'subject_template': 'str', + 'body_template': 'str', + 'to': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'subject_template': 'subjectTemplate', + 'body_template': 'bodyTemplate', + 'to': 'to', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="smtp", subject_template=None, body_template=None, to=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """SMTPNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + SMTPNotificationRuleBase.__init__(self, type=type, subject_template=subject_template, body_template=body_template, to=to, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = 
getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SMTPNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule_base.py new file mode 100644 index 0000000..18cbae1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/smtp_notification_rule_base.py @@ -0,0 +1,229 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + + +class SMTPNotificationRuleBase(NotificationRuleDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
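+
+ Editorial note with a small sketch (not emitted by the generator):
+ unlike the Slack base above, which validates `type` and
+ `message_template`, this base requires `subject_template` and `to`,
+ while `body_template` may be omitted. The wire names come from
+ attribute_map:
+
+ from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase
+
+ print(SMTPNotificationRuleBase.attribute_map['subject_template'])  # subjectTemplate
+ print(SMTPNotificationRuleBase.attribute_map['to'])                # to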
+ """ + openapi_types = { + 'type': 'str', + 'subject_template': 'str', + 'body_template': 'str', + 'to': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'subject_template': 'subjectTemplate', + 'body_template': 'bodyTemplate', + 'to': 'to', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type=None, subject_template=None, body_template=None, to=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """SMTPNotificationRuleBase - a model defined in OpenAPI.""" # noqa: E501 + NotificationRuleDiscriminator.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + + self._type = None + self._subject_template = None + self._body_template = None + self._to = None + self.discriminator = None + + self.type = type + self.subject_template = subject_template + if body_template is not None: + self.body_template = body_template + self.to = to + + @property + def type(self): + """Get the type of this SMTPNotificationRuleBase. + + :return: The type of this SMTPNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this SMTPNotificationRuleBase. + + :param type: The type of this SMTPNotificationRuleBase. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def subject_template(self): + """Get the subject_template of this SMTPNotificationRuleBase. + + :return: The subject_template of this SMTPNotificationRuleBase. 
+ :rtype: str + """ # noqa: E501 + return self._subject_template + + @subject_template.setter + def subject_template(self, subject_template): + """Set the subject_template of this SMTPNotificationRuleBase. + + :param subject_template: The subject_template of this SMTPNotificationRuleBase. + :type: str + """ # noqa: E501 + if subject_template is None: + raise ValueError("Invalid value for `subject_template`, must not be `None`") # noqa: E501 + self._subject_template = subject_template + + @property + def body_template(self): + """Get the body_template of this SMTPNotificationRuleBase. + + :return: The body_template of this SMTPNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._body_template + + @body_template.setter + def body_template(self, body_template): + """Set the body_template of this SMTPNotificationRuleBase. + + :param body_template: The body_template of this SMTPNotificationRuleBase. + :type: str + """ # noqa: E501 + self._body_template = body_template + + @property + def to(self): + """Get the to of this SMTPNotificationRuleBase. + + :return: The to of this SMTPNotificationRuleBase. + :rtype: str + """ # noqa: E501 + return self._to + + @to.setter + def to(self, to): + """Set the to of this SMTPNotificationRuleBase. + + :param to: The to of this SMTPNotificationRuleBase. + :type: str + """ # noqa: E501 + if to is None: + raise ValueError("Invalid value for `to`, must not be `None`") # noqa: E501 + self._to = to + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SMTPNotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/source.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/source.py new file mode 100644 index 0000000..c45cb12 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/source.py @@ -0,0 +1,459 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Source(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
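+
+ Example (editorial sketch; the URL is a placeholder): every field is
+ optional at construction time, but `languages` is validated against
+ an allowed set by its setter (see below):
+
+ from influxdb_client.domain.source import Source
+
+ src = Source(name="local", type="v2", url="http://localhost:8086",
+              languages=["flux"])      # "flux" and "influxql" are allowed
+ try:
+     src.languages = ["sql"]           # not in the allowed set
+ except ValueError as err:
+     print(err)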
+ """ + openapi_types = { + 'links': 'SourceLinks', + 'id': 'str', + 'org_id': 'str', + 'default': 'bool', + 'name': 'str', + 'type': 'str', + 'url': 'str', + 'insecure_skip_verify': 'bool', + 'telegraf': 'str', + 'token': 'str', + 'username': 'str', + 'password': 'str', + 'shared_secret': 'str', + 'meta_url': 'str', + 'default_rp': 'str', + 'languages': 'list[str]' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'org_id': 'orgID', + 'default': 'default', + 'name': 'name', + 'type': 'type', + 'url': 'url', + 'insecure_skip_verify': 'insecureSkipVerify', + 'telegraf': 'telegraf', + 'token': 'token', + 'username': 'username', + 'password': 'password', + 'shared_secret': 'sharedSecret', + 'meta_url': 'metaUrl', + 'default_rp': 'defaultRP', + 'languages': 'languages' + } + + def __init__(self, links=None, id=None, org_id=None, default=None, name=None, type=None, url=None, insecure_skip_verify=None, telegraf=None, token=None, username=None, password=None, shared_secret=None, meta_url=None, default_rp=None, languages=None): # noqa: E501,D401,D403 + """Source - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._id = None + self._org_id = None + self._default = None + self._name = None + self._type = None + self._url = None + self._insecure_skip_verify = None + self._telegraf = None + self._token = None + self._username = None + self._password = None + self._shared_secret = None + self._meta_url = None + self._default_rp = None + self._languages = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if default is not None: + self.default = default + if name is not None: + self.name = name + if type is not None: + self.type = type + if url is not None: + self.url = url + if insecure_skip_verify is not None: + self.insecure_skip_verify = insecure_skip_verify + if telegraf is not None: + self.telegraf = telegraf + if token is not None: + self.token = token + if username is not None: + self.username = username + if password is not None: + self.password = password + if shared_secret is not None: + self.shared_secret = shared_secret + if meta_url is not None: + self.meta_url = meta_url + if default_rp is not None: + self.default_rp = default_rp + if languages is not None: + self.languages = languages + + @property + def links(self): + """Get the links of this Source. + + :return: The links of this Source. + :rtype: SourceLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Source. + + :param links: The links of this Source. + :type: SourceLinks + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this Source. + + :return: The id of this Source. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Source. + + :param id: The id of this Source. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this Source. + + :return: The org_id of this Source. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Source. + + :param org_id: The org_id of this Source. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def default(self): + """Get the default of this Source. + + :return: The default of this Source. 
+ :rtype: bool + """ # noqa: E501 + return self._default + + @default.setter + def default(self, default): + """Set the default of this Source. + + :param default: The default of this Source. + :type: bool + """ # noqa: E501 + self._default = default + + @property + def name(self): + """Get the name of this Source. + + :return: The name of this Source. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Source. + + :param name: The name of this Source. + :type: str + """ # noqa: E501 + self._name = name + + @property + def type(self): + """Get the type of this Source. + + :return: The type of this Source. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Source. + + :param type: The type of this Source. + :type: str + """ # noqa: E501 + self._type = type + + @property + def url(self): + """Get the url of this Source. + + :return: The url of this Source. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this Source. + + :param url: The url of this Source. + :type: str + """ # noqa: E501 + self._url = url + + @property + def insecure_skip_verify(self): + """Get the insecure_skip_verify of this Source. + + :return: The insecure_skip_verify of this Source. + :rtype: bool + """ # noqa: E501 + return self._insecure_skip_verify + + @insecure_skip_verify.setter + def insecure_skip_verify(self, insecure_skip_verify): + """Set the insecure_skip_verify of this Source. + + :param insecure_skip_verify: The insecure_skip_verify of this Source. + :type: bool + """ # noqa: E501 + self._insecure_skip_verify = insecure_skip_verify + + @property + def telegraf(self): + """Get the telegraf of this Source. + + :return: The telegraf of this Source. + :rtype: str + """ # noqa: E501 + return self._telegraf + + @telegraf.setter + def telegraf(self, telegraf): + """Set the telegraf of this Source. + + :param telegraf: The telegraf of this Source. + :type: str + """ # noqa: E501 + self._telegraf = telegraf + + @property + def token(self): + """Get the token of this Source. + + :return: The token of this Source. + :rtype: str + """ # noqa: E501 + return self._token + + @token.setter + def token(self, token): + """Set the token of this Source. + + :param token: The token of this Source. + :type: str + """ # noqa: E501 + self._token = token + + @property + def username(self): + """Get the username of this Source. + + :return: The username of this Source. + :rtype: str + """ # noqa: E501 + return self._username + + @username.setter + def username(self, username): + """Set the username of this Source. + + :param username: The username of this Source. + :type: str + """ # noqa: E501 + self._username = username + + @property + def password(self): + """Get the password of this Source. + + :return: The password of this Source. + :rtype: str + """ # noqa: E501 + return self._password + + @password.setter + def password(self, password): + """Set the password of this Source. + + :param password: The password of this Source. + :type: str + """ # noqa: E501 + self._password = password + + @property + def shared_secret(self): + """Get the shared_secret of this Source. + + :return: The shared_secret of this Source. + :rtype: str + """ # noqa: E501 + return self._shared_secret + + @shared_secret.setter + def shared_secret(self, shared_secret): + """Set the shared_secret of this Source. 
+ + :param shared_secret: The shared_secret of this Source. + :type: str + """ # noqa: E501 + self._shared_secret = shared_secret + + @property + def meta_url(self): + """Get the meta_url of this Source. + + :return: The meta_url of this Source. + :rtype: str + """ # noqa: E501 + return self._meta_url + + @meta_url.setter + def meta_url(self, meta_url): + """Set the meta_url of this Source. + + :param meta_url: The meta_url of this Source. + :type: str + """ # noqa: E501 + self._meta_url = meta_url + + @property + def default_rp(self): + """Get the default_rp of this Source. + + :return: The default_rp of this Source. + :rtype: str + """ # noqa: E501 + return self._default_rp + + @default_rp.setter + def default_rp(self, default_rp): + """Set the default_rp of this Source. + + :param default_rp: The default_rp of this Source. + :type: str + """ # noqa: E501 + self._default_rp = default_rp + + @property + def languages(self): + """Get the languages of this Source. + + :return: The languages of this Source. + :rtype: list[str] + """ # noqa: E501 + return self._languages + + @languages.setter + def languages(self, languages): + """Set the languages of this Source. + + :param languages: The languages of this Source. + :type: list[str] + """ # noqa: E501 + allowed_values = ["flux", "influxql"] # noqa: E501 + if not set(languages).issubset(set(allowed_values)): + raise ValueError( + "Invalid values for `languages` [{0}], must be a subset of [{1}]" # noqa: E501 + .format(", ".join(map(str, set(languages) - set(allowed_values))), # noqa: E501 + ", ".join(map(str, allowed_values))) + ) + self._languages = languages + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Source): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/source_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/source_links.py new file mode 100644 index 0000000..baffe91 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/source_links.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class SourceLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
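+
+ Editorial sketch (not generated): the attribute is named `_self`
+ because `self` would collide with the instance parameter in the
+ accessor signatures; it still maps to the JSON key "self" via
+ attribute_map:
+
+ from influxdb_client.domain.source_links import SourceLinks
+
+ links = SourceLinks(_self="/api/v2/sources/0001")
+ print(links._self)                          # -> /api/v2/sources/0001
+ print(SourceLinks.attribute_map['_self'])   # -> self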
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'query': 'str', + 'health': 'str', + 'buckets': 'str' + } + + attribute_map = { + '_self': 'self', + 'query': 'query', + 'health': 'health', + 'buckets': 'buckets' + } + + def __init__(self, _self=None, query=None, health=None, buckets=None): # noqa: E501,D401,D403 + """SourceLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._query = None + self._health = None + self._buckets = None + self.discriminator = None + + if _self is not None: + self._self = _self + if query is not None: + self.query = query + if health is not None: + self.health = health + if buckets is not None: + self.buckets = buckets + + @property + def _self(self): + """Get the _self of this SourceLinks. + + :return: The _self of this SourceLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this SourceLinks. + + :param _self: The _self of this SourceLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def query(self): + """Get the query of this SourceLinks. + + :return: The query of this SourceLinks. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this SourceLinks. + + :param query: The query of this SourceLinks. + :type: str + """ # noqa: E501 + self._query = query + + @property + def health(self): + """Get the health of this SourceLinks. + + :return: The health of this SourceLinks. + :rtype: str + """ # noqa: E501 + return self._health + + @health.setter + def health(self, health): + """Set the health of this SourceLinks. + + :param health: The health of this SourceLinks. + :type: str + """ # noqa: E501 + self._health = health + + @property + def buckets(self): + """Get the buckets of this SourceLinks. + + :return: The buckets of this SourceLinks. + :rtype: str + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this SourceLinks. + + :param buckets: The buckets of this SourceLinks. 
+ :type: str + """ # noqa: E501 + self._buckets = buckets + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SourceLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/sources.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/sources.py new file mode 100644 index 0000000..c368f13 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/sources.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Sources(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ResourceMembersLinks', + 'sources': 'list[Source]' + } + + attribute_map = { + 'links': 'links', + 'sources': 'sources' + } + + def __init__(self, links=None, sources=None): # noqa: E501,D401,D403 + """Sources - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._sources = None + self.discriminator = None + + if links is not None: + self.links = links + if sources is not None: + self.sources = sources + + @property + def links(self): + """Get the links of this Sources. + + :return: The links of this Sources. + :rtype: ResourceMembersLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Sources. + + :param links: The links of this Sources. + :type: ResourceMembersLinks + """ # noqa: E501 + self._links = links + + @property + def sources(self): + """Get the sources of this Sources. + + :return: The sources of this Sources. + :rtype: list[Source] + """ # noqa: E501 + return self._sources + + @sources.setter + def sources(self, sources): + """Set the sources of this Sources. + + :param sources: The sources of this Sources. 
+ :type: list[Source] + """ # noqa: E501 + self._sources = sources + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Sources): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack.py new file mode 100644 index 0000000..cdae5a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Stack(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'created_at': 'datetime', + 'events': 'list[StackEvents]' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'created_at': 'createdAt', + 'events': 'events' + } + + def __init__(self, id=None, org_id=None, created_at=None, events=None): # noqa: E501,D401,D403 + """Stack - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._created_at = None + self._events = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if created_at is not None: + self.created_at = created_at + if events is not None: + self.events = events + + @property + def id(self): + """Get the id of this Stack. + + :return: The id of this Stack. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Stack. + + :param id: The id of this Stack. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this Stack. + + :return: The org_id of this Stack. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Stack. + + :param org_id: The org_id of this Stack. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def created_at(self): + """Get the created_at of this Stack. 
+ + :return: The created_at of this Stack. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Stack. + + :param created_at: The created_at of this Stack. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def events(self): + """Get the events of this Stack. + + :return: The events of this Stack. + :rtype: list[StackEvents] + """ # noqa: E501 + return self._events + + @events.setter + def events(self, events): + """Set the events of this Stack. + + :param events: The events of this Stack. + :type: list[StackEvents] + """ # noqa: E501 + self._events = events + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Stack): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_associations.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_associations.py new file mode 100644 index 0000000..7e3191b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_associations.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StackAssociations(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'kind': 'TemplateKind', + 'meta_name': 'str' + } + + attribute_map = { + 'kind': 'kind', + 'meta_name': 'metaName' + } + + def __init__(self, kind=None, meta_name=None): # noqa: E501,D401,D403 + """StackAssociations - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._meta_name = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if meta_name is not None: + self.meta_name = meta_name + + @property + def kind(self): + """Get the kind of this StackAssociations. + + :return: The kind of this StackAssociations. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this StackAssociations. 
+ + :param kind: The kind of this StackAssociations. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def meta_name(self): + """Get the meta_name of this StackAssociations. + + :return: The meta_name of this StackAssociations. + :rtype: str + """ # noqa: E501 + return self._meta_name + + @meta_name.setter + def meta_name(self, meta_name): + """Set the meta_name of this StackAssociations. + + :param meta_name: The meta_name of this StackAssociations. + :type: str + """ # noqa: E501 + self._meta_name = meta_name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StackAssociations): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_events.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_events.py new file mode 100644 index 0000000..854a3ab --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_events.py @@ -0,0 +1,245 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StackEvents(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'event_type': 'str', + 'name': 'str', + 'description': 'str', + 'sources': 'list[str]', + 'resources': 'list[StackResources]', + 'urls': 'list[str]', + 'updated_at': 'datetime' + } + + attribute_map = { + 'event_type': 'eventType', + 'name': 'name', + 'description': 'description', + 'sources': 'sources', + 'resources': 'resources', + 'urls': 'urls', + 'updated_at': 'updatedAt' + } + + def __init__(self, event_type=None, name=None, description=None, sources=None, resources=None, urls=None, updated_at=None): # noqa: E501,D401,D403 + """StackEvents - a model defined in OpenAPI.""" # noqa: E501 + self._event_type = None + self._name = None + self._description = None + self._sources = None + self._resources = None + self._urls = None + self._updated_at = None + self.discriminator = None + + if event_type is not None: + self.event_type = event_type + if name is not None: + self.name = name + if description is not None: + self.description = description + if sources is not None: + self.sources = sources + if resources is not None: + self.resources = resources + if urls is not None: + self.urls = urls + if updated_at is not None: + self.updated_at = updated_at + + @property + def event_type(self): + """Get the event_type of this StackEvents. + + :return: The event_type of this StackEvents. + :rtype: str + """ # noqa: E501 + return self._event_type + + @event_type.setter + def event_type(self, event_type): + """Set the event_type of this StackEvents. + + :param event_type: The event_type of this StackEvents. + :type: str + """ # noqa: E501 + self._event_type = event_type + + @property + def name(self): + """Get the name of this StackEvents. + + :return: The name of this StackEvents. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this StackEvents. + + :param name: The name of this StackEvents. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this StackEvents. + + :return: The description of this StackEvents. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this StackEvents. + + :param description: The description of this StackEvents. + :type: str + """ # noqa: E501 + self._description = description + + @property + def sources(self): + """Get the sources of this StackEvents. + + :return: The sources of this StackEvents. + :rtype: list[str] + """ # noqa: E501 + return self._sources + + @sources.setter + def sources(self, sources): + """Set the sources of this StackEvents. + + :param sources: The sources of this StackEvents. + :type: list[str] + """ # noqa: E501 + self._sources = sources + + @property + def resources(self): + """Get the resources of this StackEvents. + + :return: The resources of this StackEvents. + :rtype: list[StackResources] + """ # noqa: E501 + return self._resources + + @resources.setter + def resources(self, resources): + """Set the resources of this StackEvents. + + :param resources: The resources of this StackEvents. + :type: list[StackResources] + """ # noqa: E501 + self._resources = resources + + @property + def urls(self): + """Get the urls of this StackEvents. + + :return: The urls of this StackEvents. + :rtype: list[str] + """ # noqa: E501 + return self._urls + + @urls.setter + def urls(self, urls): + """Set the urls of this StackEvents. + + :param urls: The urls of this StackEvents. 
+ :type: list[str] + """ # noqa: E501 + self._urls = urls + + @property + def updated_at(self): + """Get the updated_at of this StackEvents. + + :return: The updated_at of this StackEvents. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this StackEvents. + + :param updated_at: The updated_at of this StackEvents. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StackEvents): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_links.py new file mode 100644 index 0000000..f3c7335 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_links.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StackLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str' + } + + attribute_map = { + '_self': 'self' + } + + def __init__(self, _self=None): # noqa: E501,D401,D403 + """StackLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self.discriminator = None + + if _self is not None: + self._self = _self + + @property + def _self(self): + """Get the _self of this StackLinks. + + :return: The _self of this StackLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this StackLinks. + + :param _self: The _self of this StackLinks. 
+ :type: str + """ # noqa: E501 + self.__self = _self + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StackLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_resources.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_resources.py new file mode 100644 index 0000000..3cd5f98 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/stack_resources.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StackResources(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'api_version': 'str', + 'resource_id': 'str', + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'associations': 'list[StackAssociations]', + 'links': 'StackLinks' + } + + attribute_map = { + 'api_version': 'apiVersion', + 'resource_id': 'resourceID', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'associations': 'associations', + 'links': 'links' + } + + def __init__(self, api_version=None, resource_id=None, kind=None, template_meta_name=None, associations=None, links=None): # noqa: E501,D401,D403 + """StackResources - a model defined in OpenAPI.""" # noqa: E501 + self._api_version = None + self._resource_id = None + self._kind = None + self._template_meta_name = None + self._associations = None + self._links = None + self.discriminator = None + + if api_version is not None: + self.api_version = api_version + if resource_id is not None: + self.resource_id = resource_id + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if associations is not None: + self.associations = associations + if links is not None: + self.links = links + + @property + def api_version(self): + """Get the api_version of this StackResources. + + :return: The api_version of this StackResources. 
+ :rtype: str + """ # noqa: E501 + return self._api_version + + @api_version.setter + def api_version(self, api_version): + """Set the api_version of this StackResources. + + :param api_version: The api_version of this StackResources. + :type: str + """ # noqa: E501 + self._api_version = api_version + + @property + def resource_id(self): + """Get the resource_id of this StackResources. + + :return: The resource_id of this StackResources. + :rtype: str + """ # noqa: E501 + return self._resource_id + + @resource_id.setter + def resource_id(self, resource_id): + """Set the resource_id of this StackResources. + + :param resource_id: The resource_id of this StackResources. + :type: str + """ # noqa: E501 + self._resource_id = resource_id + + @property + def kind(self): + """Get the kind of this StackResources. + + :return: The kind of this StackResources. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this StackResources. + + :param kind: The kind of this StackResources. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this StackResources. + + :return: The template_meta_name of this StackResources. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this StackResources. + + :param template_meta_name: The template_meta_name of this StackResources. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def associations(self): + """Get the associations of this StackResources. + + :return: The associations of this StackResources. + :rtype: list[StackAssociations] + """ # noqa: E501 + return self._associations + + @associations.setter + def associations(self, associations): + """Set the associations of this StackResources. + + :param associations: The associations of this StackResources. + :type: list[StackAssociations] + """ # noqa: E501 + self._associations = associations + + @property + def links(self): + """Get the links of this StackResources. + + :return: The links of this StackResources. + :rtype: StackLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this StackResources. + + :param links: The links of this StackResources. 
+ :type: StackLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StackResources): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/statement.py new file mode 100644 index 0000000..bb19fc3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/statement.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Statement(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """Statement - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Statement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/static_legend.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/static_legend.py new file mode 100644 index 0000000..cff6943 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/static_legend.py @@ -0,0 +1,245 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StaticLegend(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'colorize_rows': 'bool', + 'height_ratio': 'float', + 'show': 'bool', + 'opacity': 'float', + 'orientation_threshold': 'int', + 'value_axis': 'str', + 'width_ratio': 'float' + } + + attribute_map = { + 'colorize_rows': 'colorizeRows', + 'height_ratio': 'heightRatio', + 'show': 'show', + 'opacity': 'opacity', + 'orientation_threshold': 'orientationThreshold', + 'value_axis': 'valueAxis', + 'width_ratio': 'widthRatio' + } + + def __init__(self, colorize_rows=None, height_ratio=None, show=None, opacity=None, orientation_threshold=None, value_axis=None, width_ratio=None): # noqa: E501,D401,D403 + """StaticLegend - a model defined in OpenAPI.""" # noqa: E501 + self._colorize_rows = None + self._height_ratio = None + self._show = None + self._opacity = None + self._orientation_threshold = None + self._value_axis = None + self._width_ratio = None + self.discriminator = None + + if colorize_rows is not None: + self.colorize_rows = colorize_rows + if height_ratio is not None: + self.height_ratio = height_ratio + if show is not None: + self.show = show + if opacity is not None: + self.opacity = opacity + if orientation_threshold is not None: + self.orientation_threshold = orientation_threshold + if value_axis is not None: + self.value_axis = value_axis + if width_ratio is not None: + self.width_ratio = width_ratio + + @property + def colorize_rows(self): + """Get the colorize_rows of this StaticLegend. + + :return: The colorize_rows of this StaticLegend. + :rtype: bool + """ # noqa: E501 + return self._colorize_rows + + @colorize_rows.setter + def colorize_rows(self, colorize_rows): + """Set the colorize_rows of this StaticLegend. + + :param colorize_rows: The colorize_rows of this StaticLegend. + :type: bool + """ # noqa: E501 + self._colorize_rows = colorize_rows + + @property + def height_ratio(self): + """Get the height_ratio of this StaticLegend. + + :return: The height_ratio of this StaticLegend. + :rtype: float + """ # noqa: E501 + return self._height_ratio + + @height_ratio.setter + def height_ratio(self, height_ratio): + """Set the height_ratio of this StaticLegend. + + :param height_ratio: The height_ratio of this StaticLegend. + :type: float + """ # noqa: E501 + self._height_ratio = height_ratio + + @property + def show(self): + """Get the show of this StaticLegend. + + :return: The show of this StaticLegend. + :rtype: bool + """ # noqa: E501 + return self._show + + @show.setter + def show(self, show): + """Set the show of this StaticLegend. + + :param show: The show of this StaticLegend. + :type: bool + """ # noqa: E501 + self._show = show + + @property + def opacity(self): + """Get the opacity of this StaticLegend. + + :return: The opacity of this StaticLegend. + :rtype: float + """ # noqa: E501 + return self._opacity + + @opacity.setter + def opacity(self, opacity): + """Set the opacity of this StaticLegend. + + :param opacity: The opacity of this StaticLegend. + :type: float + """ # noqa: E501 + self._opacity = opacity + + @property + def orientation_threshold(self): + """Get the orientation_threshold of this StaticLegend. + + :return: The orientation_threshold of this StaticLegend. + :rtype: int + """ # noqa: E501 + return self._orientation_threshold + + @orientation_threshold.setter + def orientation_threshold(self, orientation_threshold): + """Set the orientation_threshold of this StaticLegend. + + :param orientation_threshold: The orientation_threshold of this StaticLegend. 
+ :type: int + """ # noqa: E501 + self._orientation_threshold = orientation_threshold + + @property + def value_axis(self): + """Get the value_axis of this StaticLegend. + + :return: The value_axis of this StaticLegend. + :rtype: str + """ # noqa: E501 + return self._value_axis + + @value_axis.setter + def value_axis(self, value_axis): + """Set the value_axis of this StaticLegend. + + :param value_axis: The value_axis of this StaticLegend. + :type: str + """ # noqa: E501 + self._value_axis = value_axis + + @property + def width_ratio(self): + """Get the width_ratio of this StaticLegend. + + :return: The width_ratio of this StaticLegend. + :rtype: float + """ # noqa: E501 + return self._width_ratio + + @width_ratio.setter + def width_ratio(self, width_ratio): + """Set the width_ratio of this StaticLegend. + + :param width_ratio: The width_ratio of this StaticLegend. + :type: float + """ # noqa: E501 + self._width_ratio = width_ratio + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StaticLegend): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/status_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/status_rule.py new file mode 100644 index 0000000..bac0584 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/status_rule.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class StatusRule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'current_level': 'RuleStatusLevel', + 'previous_level': 'RuleStatusLevel', + 'count': 'int', + 'period': 'str' + } + + attribute_map = { + 'current_level': 'currentLevel', + 'previous_level': 'previousLevel', + 'count': 'count', + 'period': 'period' + } + + def __init__(self, current_level=None, previous_level=None, count=None, period=None): # noqa: E501,D401,D403 + """StatusRule - a model defined in OpenAPI.""" # noqa: E501 + self._current_level = None + self._previous_level = None + self._count = None + self._period = None + self.discriminator = None + + if current_level is not None: + self.current_level = current_level + if previous_level is not None: + self.previous_level = previous_level + if count is not None: + self.count = count + if period is not None: + self.period = period + + @property + def current_level(self): + """Get the current_level of this StatusRule. + + :return: The current_level of this StatusRule. + :rtype: RuleStatusLevel + """ # noqa: E501 + return self._current_level + + @current_level.setter + def current_level(self, current_level): + """Set the current_level of this StatusRule. + + :param current_level: The current_level of this StatusRule. + :type: RuleStatusLevel + """ # noqa: E501 + self._current_level = current_level + + @property + def previous_level(self): + """Get the previous_level of this StatusRule. + + :return: The previous_level of this StatusRule. + :rtype: RuleStatusLevel + """ # noqa: E501 + return self._previous_level + + @previous_level.setter + def previous_level(self, previous_level): + """Set the previous_level of this StatusRule. + + :param previous_level: The previous_level of this StatusRule. + :type: RuleStatusLevel + """ # noqa: E501 + self._previous_level = previous_level + + @property + def count(self): + """Get the count of this StatusRule. + + :return: The count of this StatusRule. + :rtype: int + """ # noqa: E501 + return self._count + + @count.setter + def count(self, count): + """Set the count of this StatusRule. + + :param count: The count of this StatusRule. + :type: int + """ # noqa: E501 + self._count = count + + @property + def period(self): + """Get the period of this StatusRule. + + :return: The period of this StatusRule. + :rtype: str + """ # noqa: E501 + return self._period + + @period.setter + def period(self, period): + """Set the period of this StatusRule. + + :param period: The period of this StatusRule. 
+ :type: str + """ # noqa: E501 + self._period = period + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StatusRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/string_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/string_literal.py new file mode 100644 index 0000000..643ca42 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/string_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.property_key import PropertyKey + + +class StringLiteral(PropertyKey): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'str' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """StringLiteral - a model defined in OpenAPI.""" # noqa: E501 + PropertyKey.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this StringLiteral. + + Type of AST node + + :return: The type of this StringLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this StringLiteral. + + Type of AST node + + :param type: The type of this StringLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this StringLiteral. + + :return: The value of this StringLiteral. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this StringLiteral. + + :param value: The value of this StringLiteral. 
+ :type: str + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, StringLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/subscription_manifest.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/subscription_manifest.py new file mode 100644 index 0000000..7366561 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/subscription_manifest.py @@ -0,0 +1,156 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class SubscriptionManifest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'mode': 'str', + 'destinations': 'list[str]' + } + + attribute_map = { + 'name': 'name', + 'mode': 'mode', + 'destinations': 'destinations' + } + + def __init__(self, name=None, mode=None, destinations=None): # noqa: E501,D401,D403 + """SubscriptionManifest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._mode = None + self._destinations = None + self.discriminator = None + + self.name = name + self.mode = mode + self.destinations = destinations + + @property + def name(self): + """Get the name of this SubscriptionManifest. + + :return: The name of this SubscriptionManifest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this SubscriptionManifest. + + :param name: The name of this SubscriptionManifest. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def mode(self): + """Get the mode of this SubscriptionManifest. + + :return: The mode of this SubscriptionManifest. + :rtype: str + """ # noqa: E501 + return self._mode + + @mode.setter + def mode(self, mode): + """Set the mode of this SubscriptionManifest. + + :param mode: The mode of this SubscriptionManifest. 
+ :type: str + """ # noqa: E501 + if mode is None: + raise ValueError("Invalid value for `mode`, must not be `None`") # noqa: E501 + self._mode = mode + + @property + def destinations(self): + """Get the destinations of this SubscriptionManifest. + + :return: The destinations of this SubscriptionManifest. + :rtype: list[str] + """ # noqa: E501 + return self._destinations + + @destinations.setter + def destinations(self, destinations): + """Set the destinations of this SubscriptionManifest. + + :param destinations: The destinations of this SubscriptionManifest. + :type: list[str] + """ # noqa: E501 + if destinations is None: + raise ValueError("Invalid value for `destinations`, must not be `None`") # noqa: E501 + self._destinations = destinations + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, SubscriptionManifest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties.py new file mode 100644 index 0000000..bd8ec0d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties.py @@ -0,0 +1,344 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class TableViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'table_options': 'TableViewPropertiesTableOptions', + 'field_options': 'list[RenamableField]', + 'time_format': 'str', + 'decimal_places': 'DecimalPlaces' + } + + attribute_map = { + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'table_options': 'tableOptions', + 'field_options': 'fieldOptions', + 'time_format': 'timeFormat', + 'decimal_places': 'decimalPlaces' + } + + def __init__(self, type=None, queries=None, colors=None, shape=None, note=None, show_note_when_empty=None, table_options=None, field_options=None, time_format=None, decimal_places=None): # noqa: E501,D401,D403 + """TableViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._type = None + self._queries = None + self._colors = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._table_options = None + self._field_options = None + self._time_format = None + self._decimal_places = None + self.discriminator = None + + self.type = type + self.queries = queries + self.colors = colors + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.table_options = table_options + self.field_options = field_options + self.time_format = time_format + self.decimal_places = decimal_places + + @property + def type(self): + """Get the type of this TableViewProperties. + + :return: The type of this TableViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this TableViewProperties. + + :param type: The type of this TableViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this TableViewProperties. + + :return: The queries of this TableViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this TableViewProperties. + + :param queries: The queries of this TableViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this TableViewProperties. + + Colors define color encoding of data into a visualization + + :return: The colors of this TableViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this TableViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this TableViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def shape(self): + """Get the shape of this TableViewProperties. + + :return: The shape of this TableViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this TableViewProperties. 
+ + :param shape: The shape of this TableViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this TableViewProperties. + + :return: The note of this TableViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this TableViewProperties. + + :param note: The note of this TableViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this TableViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this TableViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this TableViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this TableViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def table_options(self): + """Get the table_options of this TableViewProperties. + + :return: The table_options of this TableViewProperties. + :rtype: TableViewPropertiesTableOptions + """ # noqa: E501 + return self._table_options + + @table_options.setter + def table_options(self, table_options): + """Set the table_options of this TableViewProperties. + + :param table_options: The table_options of this TableViewProperties. + :type: TableViewPropertiesTableOptions + """ # noqa: E501 + if table_options is None: + raise ValueError("Invalid value for `table_options`, must not be `None`") # noqa: E501 + self._table_options = table_options + + @property + def field_options(self): + """Get the field_options of this TableViewProperties. + + fieldOptions represent the fields retrieved by the query with customization options + + :return: The field_options of this TableViewProperties. + :rtype: list[RenamableField] + """ # noqa: E501 + return self._field_options + + @field_options.setter + def field_options(self, field_options): + """Set the field_options of this TableViewProperties. + + fieldOptions represent the fields retrieved by the query with customization options + + :param field_options: The field_options of this TableViewProperties. + :type: list[RenamableField] + """ # noqa: E501 + if field_options is None: + raise ValueError("Invalid value for `field_options`, must not be `None`") # noqa: E501 + self._field_options = field_options + + @property + def time_format(self): + """Get the time_format of this TableViewProperties. + + timeFormat describes the display format for time values according to moment.js date formatting + + :return: The time_format of this TableViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this TableViewProperties. + + timeFormat describes the display format for time values according to moment.js date formatting + + :param time_format: The time_format of this TableViewProperties. 
+ :type: str + """ # noqa: E501 + if time_format is None: + raise ValueError("Invalid value for `time_format`, must not be `None`") # noqa: E501 + self._time_format = time_format + + @property + def decimal_places(self): + """Get the decimal_places of this TableViewProperties. + + :return: The decimal_places of this TableViewProperties. + :rtype: DecimalPlaces + """ # noqa: E501 + return self._decimal_places + + @decimal_places.setter + def decimal_places(self, decimal_places): + """Set the decimal_places of this TableViewProperties. + + :param decimal_places: The decimal_places of this TableViewProperties. + :type: DecimalPlaces + """ # noqa: E501 + if decimal_places is None: + raise ValueError("Invalid value for `decimal_places`, must not be `None`") # noqa: E501 + self._decimal_places = decimal_places + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TableViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties_table_options.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties_table_options.py new file mode 100644 index 0000000..71193ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/table_view_properties_table_options.py @@ -0,0 +1,188 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TableViewPropertiesTableOptions(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'vertical_time_axis': 'bool', + 'sort_by': 'RenamableField', + 'wrapping': 'str', + 'fix_first_column': 'bool' + } + + attribute_map = { + 'vertical_time_axis': 'verticalTimeAxis', + 'sort_by': 'sortBy', + 'wrapping': 'wrapping', + 'fix_first_column': 'fixFirstColumn' + } + + def __init__(self, vertical_time_axis=None, sort_by=None, wrapping=None, fix_first_column=None): # noqa: E501,D401,D403 + """TableViewPropertiesTableOptions - a model defined in OpenAPI.""" # noqa: E501 + self._vertical_time_axis = None + self._sort_by = None + self._wrapping = None + self._fix_first_column = None + self.discriminator = None + + if vertical_time_axis is not None: + self.vertical_time_axis = vertical_time_axis + if sort_by is not None: + self.sort_by = sort_by + if wrapping is not None: + self.wrapping = wrapping + if fix_first_column is not None: + self.fix_first_column = fix_first_column + + @property + def vertical_time_axis(self): + """Get the vertical_time_axis of this TableViewPropertiesTableOptions. + + verticalTimeAxis describes the orientation of the table by indicating whether the time axis will be displayed vertically + + :return: The vertical_time_axis of this TableViewPropertiesTableOptions. + :rtype: bool + """ # noqa: E501 + return self._vertical_time_axis + + @vertical_time_axis.setter + def vertical_time_axis(self, vertical_time_axis): + """Set the vertical_time_axis of this TableViewPropertiesTableOptions. + + verticalTimeAxis describes the orientation of the table by indicating whether the time axis will be displayed vertically + + :param vertical_time_axis: The vertical_time_axis of this TableViewPropertiesTableOptions. + :type: bool + """ # noqa: E501 + self._vertical_time_axis = vertical_time_axis + + @property + def sort_by(self): + """Get the sort_by of this TableViewPropertiesTableOptions. + + :return: The sort_by of this TableViewPropertiesTableOptions. + :rtype: RenamableField + """ # noqa: E501 + return self._sort_by + + @sort_by.setter + def sort_by(self, sort_by): + """Set the sort_by of this TableViewPropertiesTableOptions. + + :param sort_by: The sort_by of this TableViewPropertiesTableOptions. + :type: RenamableField + """ # noqa: E501 + self._sort_by = sort_by + + @property + def wrapping(self): + """Get the wrapping of this TableViewPropertiesTableOptions. + + Wrapping describes the text wrapping style to be used in table views + + :return: The wrapping of this TableViewPropertiesTableOptions. + :rtype: str + """ # noqa: E501 + return self._wrapping + + @wrapping.setter + def wrapping(self, wrapping): + """Set the wrapping of this TableViewPropertiesTableOptions. + + Wrapping describes the text wrapping style to be used in table views + + :param wrapping: The wrapping of this TableViewPropertiesTableOptions. + :type: str + """ # noqa: E501 + self._wrapping = wrapping + + @property + def fix_first_column(self): + """Get the fix_first_column of this TableViewPropertiesTableOptions. + + fixFirstColumn indicates whether the first column of the table should be locked + + :return: The fix_first_column of this TableViewPropertiesTableOptions. + :rtype: bool + """ # noqa: E501 + return self._fix_first_column + + @fix_first_column.setter + def fix_first_column(self, fix_first_column): + """Set the fix_first_column of this TableViewPropertiesTableOptions. + + fixFirstColumn indicates whether the first column of the table should be locked + + :param fix_first_column: The fix_first_column of this TableViewPropertiesTableOptions. 
+ :type: bool + """ # noqa: E501 + self._fix_first_column = fix_first_column + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TableViewPropertiesTableOptions): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/tag_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/tag_rule.py new file mode 100644 index 0000000..9683856 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/tag_rule.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TagRule(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'key': 'str', + 'value': 'str', + 'operator': 'str' + } + + attribute_map = { + 'key': 'key', + 'value': 'value', + 'operator': 'operator' + } + + def __init__(self, key=None, value=None, operator=None): # noqa: E501,D401,D403 + """TagRule - a model defined in OpenAPI.""" # noqa: E501 + self._key = None + self._value = None + self._operator = None + self.discriminator = None + + if key is not None: + self.key = key + if value is not None: + self.value = value + if operator is not None: + self.operator = operator + + @property + def key(self): + """Get the key of this TagRule. + + :return: The key of this TagRule. + :rtype: str + """ # noqa: E501 + return self._key + + @key.setter + def key(self, key): + """Set the key of this TagRule. + + :param key: The key of this TagRule. + :type: str + """ # noqa: E501 + self._key = key + + @property + def value(self): + """Get the value of this TagRule. + + :return: The value of this TagRule. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this TagRule. + + :param value: The value of this TagRule. + :type: str + """ # noqa: E501 + self._value = value + + @property + def operator(self): + """Get the operator of this TagRule. + + :return: The operator of this TagRule. 
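
A note on the serialization pattern that this and every other generated model below repeat: to_dict() keys its result by the Python attribute name from openapi_types (snake_case), not by the camelCase wire key in attribute_map; the JSON key mapping is applied elsewhere in the client. A minimal check against the TableViewPropertiesTableOptions model just defined (the values are illustrative):

    from influxdb_client.domain.table_view_properties_table_options import TableViewPropertiesTableOptions

    opts = TableViewPropertiesTableOptions(vertical_time_axis=True, fix_first_column=False)
    # Unset attributes remain None; keys are snake_case, e.g. fix_first_column, not fixFirstColumn.
    print(opts.to_dict())
    # {'vertical_time_axis': True, 'sort_by': None, 'wrapping': None, 'fix_first_column': False}
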
+ :rtype: str + """ # noqa: E501 + return self._operator + + @operator.setter + def operator(self, operator): + """Set the operator of this TagRule. + + :param operator: The operator of this TagRule. + :type: str + """ # noqa: E501 + self._operator = operator + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TagRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/task.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task.py new file mode 100644 index 0000000..531babc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task.py @@ -0,0 +1,569 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Task(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
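
The TagRule model just completed is typically used to match series by tag when configuring notification rules. A minimal sketch; the operator value shown ("equal") is an assumption based on the InfluxDB v2 API reference, not something stated in this file:

    from influxdb_client.domain.tag_rule import TagRule

    # Match points whose "host" tag equals "server01" (operator value assumed).
    rule = TagRule(key="host", value="server01", operator="equal")
    assert rule.to_dict() == {"key": "host", "value": "server01", "operator": "equal"}
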
+ """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'org': 'str', + 'name': 'str', + 'owner_id': 'str', + 'description': 'str', + 'status': 'TaskStatusType', + 'labels': 'list[Label]', + 'authorization_id': 'str', + 'flux': 'str', + 'every': 'str', + 'cron': 'str', + 'offset': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'links': 'TaskLinks' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'org': 'org', + 'name': 'name', + 'owner_id': 'ownerID', + 'description': 'description', + 'status': 'status', + 'labels': 'labels', + 'authorization_id': 'authorizationID', + 'flux': 'flux', + 'every': 'every', + 'cron': 'cron', + 'offset': 'offset', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'links': 'links' + } + + def __init__(self, id=None, org_id=None, org=None, name=None, owner_id=None, description=None, status=None, labels=None, authorization_id=None, flux=None, every=None, cron=None, offset=None, latest_completed=None, last_run_status=None, last_run_error=None, created_at=None, updated_at=None, links=None): # noqa: E501,D401,D403 + """Task - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._org = None + self._name = None + self._owner_id = None + self._description = None + self._status = None + self._labels = None + self._authorization_id = None + self._flux = None + self._every = None + self._cron = None + self._offset = None + self._latest_completed = None + self._last_run_status = None + self._last_run_error = None + self._created_at = None + self._updated_at = None + self._links = None + self.discriminator = None + + self.id = id + self.org_id = org_id + if org is not None: + self.org = org + self.name = name + if owner_id is not None: + self.owner_id = owner_id + if description is not None: + self.description = description + if status is not None: + self.status = status + if labels is not None: + self.labels = labels + if authorization_id is not None: + self.authorization_id = authorization_id + self.flux = flux + if every is not None: + self.every = every + if cron is not None: + self.cron = cron + if offset is not None: + self.offset = offset + if latest_completed is not None: + self.latest_completed = latest_completed + if last_run_status is not None: + self.last_run_status = last_run_status + if last_run_error is not None: + self.last_run_error = last_run_error + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this Task. + + :return: The id of this Task. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Task. + + :param id: The id of this Task. + :type: str + """ # noqa: E501 + if id is None: + raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this Task. + + An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) ID. Specifies the organization that owns the task. + + :return: The org_id of this Task. 
+ :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Task. + + An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) ID. Specifies the organization that owns the task. + + :param org_id: The org_id of this Task. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def org(self): + """Get the org of this Task. + + An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) name. Specifies the organization that owns the task. + + :return: The org of this Task. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this Task. + + An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) name. Specifies the organization that owns the task. + + :param org: The org of this Task. + :type: str + """ # noqa: E501 + self._org = org + + @property + def name(self): + """Get the name of this Task. + + The name of the task. + + :return: The name of this Task. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Task. + + The name of the task. + + :param name: The name of this Task. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def owner_id(self): + """Get the owner_id of this Task. + + A [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) ID. Specifies the owner of the task. To find a user ID, you can use the [`GET /api/v2/users` endpoint](#operation/GetUsers) to list users. + + :return: The owner_id of this Task. + :rtype: str + """ # noqa: E501 + return self._owner_id + + @owner_id.setter + def owner_id(self, owner_id): + """Set the owner_id of this Task. + + A [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) ID. Specifies the owner of the task. To find a user ID, you can use the [`GET /api/v2/users` endpoint](#operation/GetUsers) to list users. + + :param owner_id: The owner_id of this Task. + :type: str + """ # noqa: E501 + self._owner_id = owner_id + + @property + def description(self): + """Get the description of this Task. + + A description of the task. + + :return: The description of this Task. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Task. + + A description of the task. + + :param description: The description of this Task. + :type: str + """ # noqa: E501 + self._description = description + + @property + def status(self): + """Get the status of this Task. + + :return: The status of this Task. + :rtype: TaskStatusType + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this Task. + + :param status: The status of this Task. + :type: TaskStatusType + """ # noqa: E501 + self._status = status + + @property + def labels(self): + """Get the labels of this Task. + + :return: The labels of this Task. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this Task. + + :param labels: The labels of this Task. 
+ :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def authorization_id(self): + """Get the authorization_id of this Task. + + An authorization ID. Specifies the authorization used when the task communicates with the query engine. To find an authorization ID, use the [`GET /api/v2/authorizations` endpoint](#operation/GetAuthorizations) to list authorizations. + + :return: The authorization_id of this Task. + :rtype: str + """ # noqa: E501 + return self._authorization_id + + @authorization_id.setter + def authorization_id(self, authorization_id): + """Set the authorization_id of this Task. + + An authorization ID. Specifies the authorization used when the task communicates with the query engine. To find an authorization ID, use the [`GET /api/v2/authorizations` endpoint](#operation/GetAuthorizations) to list authorizations. + + :param authorization_id: The authorization_id of this Task. + :type: str + """ # noqa: E501 + self._authorization_id = authorization_id + + @property + def flux(self): + """Get the flux of this Task. + + The Flux script that the task executes. + + :return: The flux of this Task. + :rtype: str + """ # noqa: E501 + return self._flux + + @flux.setter + def flux(self, flux): + """Set the flux of this Task. + + The Flux script that the task executes. + + :param flux: The flux of this Task. + :type: str + """ # noqa: E501 + if flux is None: + raise ValueError("Invalid value for `flux`, must not be `None`") # noqa: E501 + self._flux = flux + + @property + def every(self): + """Get the every of this Task. + + The interval ([duration literal](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) at which the task runs. `every` also determines when the task first runs, depending on the specified time. + + :return: The every of this Task. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this Task. + + The interval ([duration literal](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) at which the task runs. `every` also determines when the task first runs, depending on the specified time. + + :param every: The every of this Task. + :type: str + """ # noqa: E501 + self._every = every + + @property + def cron(self): + """Get the cron of this Task. + + A [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview) that defines the schedule on which the task runs. InfluxDB uses the system time when evaluating Cron expressions. + + :return: The cron of this Task. + :rtype: str + """ # noqa: E501 + return self._cron + + @cron.setter + def cron(self, cron): + """Set the cron of this Task. + + A [Cron expression](https://en.wikipedia.org/wiki/Cron#Overview) that defines the schedule on which the task runs. InfluxDB uses the system time when evaluating Cron expressions. + + :param cron: The cron of this Task. + :type: str + """ # noqa: E501 + self._cron = cron + + @property + def offset(self): + """Get the offset of this Task. + + A [duration](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals) to delay execution of the task after the scheduled time has elapsed. `0` removes the offset. + + :return: The offset of this Task. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this Task. 
+ + A [duration](https://docs.influxdata.com/flux/v0.x/spec/lexical-elements/#duration-literals) to delay execution of the task after the scheduled time has elapsed. `0` removes the offset. + + :param offset: The offset of this Task. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def latest_completed(self): + """Get the latest_completed of this Task. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :return: The latest_completed of this Task. + :rtype: datetime + """ # noqa: E501 + return self._latest_completed + + @latest_completed.setter + def latest_completed(self, latest_completed): + """Set the latest_completed of this Task. + + A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)) of the latest scheduled and completed run. + + :param latest_completed: The latest_completed of this Task. + :type: datetime + """ # noqa: E501 + self._latest_completed = latest_completed + + @property + def last_run_status(self): + """Get the last_run_status of this Task. + + :return: The last_run_status of this Task. + :rtype: str + """ # noqa: E501 + return self._last_run_status + + @last_run_status.setter + def last_run_status(self, last_run_status): + """Set the last_run_status of this Task. + + :param last_run_status: The last_run_status of this Task. + :type: str + """ # noqa: E501 + self._last_run_status = last_run_status + + @property + def last_run_error(self): + """Get the last_run_error of this Task. + + :return: The last_run_error of this Task. + :rtype: str + """ # noqa: E501 + return self._last_run_error + + @last_run_error.setter + def last_run_error(self, last_run_error): + """Set the last_run_error of this Task. + + :param last_run_error: The last_run_error of this Task. + :type: str + """ # noqa: E501 + self._last_run_error = last_run_error + + @property + def created_at(self): + """Get the created_at of this Task. + + :return: The created_at of this Task. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Task. + + :param created_at: The created_at of this Task. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Task. + + :return: The updated_at of this Task. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Task. + + :param updated_at: The updated_at of this Task. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + @property + def links(self): + """Get the links of this Task. + + :return: The links of this Task. + :rtype: TaskLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Task. + + :param links: The links of this Task. 
+ :type: TaskLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Task): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_create_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_create_request.py new file mode 100644 index 0000000..462c008 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_create_request.py @@ -0,0 +1,216 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TaskCreateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'org_id': 'str', + 'org': 'str', + 'status': 'TaskStatusType', + 'flux': 'str', + 'description': 'str' + } + + attribute_map = { + 'org_id': 'orgID', + 'org': 'org', + 'status': 'status', + 'flux': 'flux', + 'description': 'description' + } + + def __init__(self, org_id=None, org=None, status=None, flux=None, description=None): # noqa: E501,D401,D403 + """TaskCreateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._org = None + self._status = None + self._flux = None + self._description = None + self.discriminator = None + + if org_id is not None: + self.org_id = org_id + if org is not None: + self.org = org + if status is not None: + self.status = status + self.flux = flux + if description is not None: + self.description = description + + @property + def org_id(self): + """Get the org_id of this TaskCreateRequest. + + The ID of the organization that owns this Task. + + :return: The org_id of this TaskCreateRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TaskCreateRequest. + + The ID of the organization that owns this Task. + + :param org_id: The org_id of this TaskCreateRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def org(self): + """Get the org of this TaskCreateRequest. 
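
As the setters in the Task model above show, id, org_id, name, and flux each raise ValueError on None, and __init__ assigns all four unconditionally, so a bare Task() cannot be constructed. A short sketch of that contract (all field values are illustrative):

    from influxdb_client.domain.task import Task

    try:
        Task()  # the required `id` setter fires first
    except ValueError as err:
        print(err)  # Invalid value for `id`, must not be `None`

    task = Task(id="0123456789abcdef", org_id="fedcba9876543210",
                name="downsample", flux='from(bucket: "raw") |> range(start: -1h)')
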
+ + The name of the organization that owns this Task. + + :return: The org of this TaskCreateRequest. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this TaskCreateRequest. + + The name of the organization that owns this Task. + + :param org: The org of this TaskCreateRequest. + :type: str + """ # noqa: E501 + self._org = org + + @property + def status(self): + """Get the status of this TaskCreateRequest. + + :return: The status of this TaskCreateRequest. + :rtype: TaskStatusType + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TaskCreateRequest. + + :param status: The status of this TaskCreateRequest. + :type: TaskStatusType + """ # noqa: E501 + self._status = status + + @property + def flux(self): + """Get the flux of this TaskCreateRequest. + + The Flux script to run for this task. + + :return: The flux of this TaskCreateRequest. + :rtype: str + """ # noqa: E501 + return self._flux + + @flux.setter + def flux(self, flux): + """Set the flux of this TaskCreateRequest. + + The Flux script to run for this task. + + :param flux: The flux of this TaskCreateRequest. + :type: str + """ # noqa: E501 + if flux is None: + raise ValueError("Invalid value for `flux`, must not be `None`") # noqa: E501 + self._flux = flux + + @property + def description(self): + """Get the description of this TaskCreateRequest. + + An optional description of the task. + + :return: The description of this TaskCreateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TaskCreateRequest. + + An optional description of the task. + + :param description: The description of this TaskCreateRequest. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TaskCreateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_links.py new file mode 100644 index 0000000..86465f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_links.py @@ -0,0 +1,246 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TaskLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'owners': 'str', + 'members': 'str', + 'runs': 'str', + 'logs': 'str', + 'labels': 'str' + } + + attribute_map = { + '_self': 'self', + 'owners': 'owners', + 'members': 'members', + 'runs': 'runs', + 'logs': 'logs', + 'labels': 'labels' + } + + def __init__(self, _self=None, owners=None, members=None, runs=None, logs=None, labels=None): # noqa: E501,D401,D403 + """TaskLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._owners = None + self._members = None + self._runs = None + self._logs = None + self._labels = None + self.discriminator = None + + if _self is not None: + self._self = _self + if owners is not None: + self.owners = owners + if members is not None: + self.members = members + if runs is not None: + self.runs = runs + if logs is not None: + self.logs = logs + if labels is not None: + self.labels = labels + + @property + def _self(self): + """Get the _self of this TaskLinks. + + URI of resource. + + :return: The _self of this TaskLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this TaskLinks. + + URI of resource. + + :param _self: The _self of this TaskLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def owners(self): + """Get the owners of this TaskLinks. + + URI of resource. + + :return: The owners of this TaskLinks. + :rtype: str + """ # noqa: E501 + return self._owners + + @owners.setter + def owners(self, owners): + """Set the owners of this TaskLinks. + + URI of resource. + + :param owners: The owners of this TaskLinks. + :type: str + """ # noqa: E501 + self._owners = owners + + @property + def members(self): + """Get the members of this TaskLinks. + + URI of resource. + + :return: The members of this TaskLinks. + :rtype: str + """ # noqa: E501 + return self._members + + @members.setter + def members(self, members): + """Set the members of this TaskLinks. + + URI of resource. + + :param members: The members of this TaskLinks. + :type: str + """ # noqa: E501 + self._members = members + + @property + def runs(self): + """Get the runs of this TaskLinks. + + URI of resource. + + :return: The runs of this TaskLinks. + :rtype: str + """ # noqa: E501 + return self._runs + + @runs.setter + def runs(self, runs): + """Set the runs of this TaskLinks. + + URI of resource. + + :param runs: The runs of this TaskLinks. + :type: str + """ # noqa: E501 + self._runs = runs + + @property + def logs(self): + """Get the logs of this TaskLinks. + + URI of resource. + + :return: The logs of this TaskLinks. + :rtype: str + """ # noqa: E501 + return self._logs + + @logs.setter + def logs(self, logs): + """Set the logs of this TaskLinks. + + URI of resource. + + :param logs: The logs of this TaskLinks. + :type: str + """ # noqa: E501 + self._logs = logs + + @property + def labels(self): + """Get the labels of this TaskLinks. + + URI of resource. + + :return: The labels of this TaskLinks. 
+ :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this TaskLinks. + + URI of resource. + + :param labels: The labels of this TaskLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TaskLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_status_type.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_status_type.py new file mode 100644 index 0000000..c775c39 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_status_type.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TaskStatusType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + ACTIVE = "active" + INACTIVE = "inactive" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
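
TaskCreateRequest above mirrors the POST /api/v2/tasks body: only flux is required (its setter rejects None), and scheduling is expressed inside the Flux script itself via the task option rather than as separate request fields. A rough sketch with an illustrative script; the IDs are placeholders:

    from influxdb_client.domain.task_create_request import TaskCreateRequest

    flux = '''
    option task = {name: "cpu-downsample", every: 1h}
    from(bucket: "raw") |> range(start: -task.every)
    '''

    req = TaskCreateRequest(org_id="fedcba9876543210", flux=flux, status="active")
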
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """TaskStatusType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TaskStatusType): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_update_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_update_request.py new file mode 100644 index 0000000..1755a43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/task_update_request.py @@ -0,0 +1,269 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TaskUpdateRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'status': 'TaskStatusType', + 'flux': 'str', + 'name': 'str', + 'every': 'str', + 'cron': 'str', + 'offset': 'str', + 'description': 'str' + } + + attribute_map = { + 'status': 'status', + 'flux': 'flux', + 'name': 'name', + 'every': 'every', + 'cron': 'cron', + 'offset': 'offset', + 'description': 'description' + } + + def __init__(self, status=None, flux=None, name=None, every=None, cron=None, offset=None, description=None): # noqa: E501,D401,D403 + """TaskUpdateRequest - a model defined in OpenAPI.""" # noqa: E501 + self._status = None + self._flux = None + self._name = None + self._every = None + self._cron = None + self._offset = None + self._description = None + self.discriminator = None + + if status is not None: + self.status = status + if flux is not None: + self.flux = flux + if name is not None: + self.name = name + if every is not None: + self.every = every + if cron is not None: + self.cron = cron + if offset is not None: + self.offset = offset + if description is not None: + self.description = description + + @property + def status(self): + """Get the status of this TaskUpdateRequest. + + :return: The status of this TaskUpdateRequest. 
+ :rtype: TaskStatusType + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TaskUpdateRequest. + + :param status: The status of this TaskUpdateRequest. + :type: TaskStatusType + """ # noqa: E501 + self._status = status + + @property + def flux(self): + """Get the flux of this TaskUpdateRequest. + + The Flux script that the task runs. + + :return: The flux of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._flux + + @flux.setter + def flux(self, flux): + """Set the flux of this TaskUpdateRequest. + + The Flux script that the task runs. + + :param flux: The flux of this TaskUpdateRequest. + :type: str + """ # noqa: E501 + self._flux = flux + + @property + def name(self): + """Get the name of this TaskUpdateRequest. + + Update the 'name' option in the flux script. + + :return: The name of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TaskUpdateRequest. + + Update the 'name' option in the flux script. + + :param name: The name of this TaskUpdateRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def every(self): + """Get the every of this TaskUpdateRequest. + + Update the 'every' option in the flux script. + + :return: The every of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this TaskUpdateRequest. + + Update the 'every' option in the flux script. + + :param every: The every of this TaskUpdateRequest. + :type: str + """ # noqa: E501 + self._every = every + + @property + def cron(self): + """Get the cron of this TaskUpdateRequest. + + Update the 'cron' option in the flux script. + + :return: The cron of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._cron + + @cron.setter + def cron(self, cron): + """Set the cron of this TaskUpdateRequest. + + Update the 'cron' option in the flux script. + + :param cron: The cron of this TaskUpdateRequest. + :type: str + """ # noqa: E501 + self._cron = cron + + @property + def offset(self): + """Get the offset of this TaskUpdateRequest. + + Update the 'offset' option in the flux script. + + :return: The offset of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this TaskUpdateRequest. + + Update the 'offset' option in the flux script. + + :param offset: The offset of this TaskUpdateRequest. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def description(self): + """Get the description of this TaskUpdateRequest. + + Update the description of the task. + + :return: The description of this TaskUpdateRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TaskUpdateRequest. + + Update the description of the task. + + :param description: The description of this TaskUpdateRequest. 
+ :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TaskUpdateRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/tasks.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/tasks.py new file mode 100644 index 0000000..4174b04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/tasks.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Tasks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'Links', + 'tasks': 'list[Task]' + } + + attribute_map = { + 'links': 'links', + 'tasks': 'tasks' + } + + def __init__(self, links=None, tasks=None): # noqa: E501,D401,D403 + """Tasks - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._tasks = None + self.discriminator = None + + if links is not None: + self.links = links + if tasks is not None: + self.tasks = tasks + + @property + def links(self): + """Get the links of this Tasks. + + :return: The links of this Tasks. + :rtype: Links + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Tasks. + + :param links: The links of this Tasks. + :type: Links + """ # noqa: E501 + self._links = links + + @property + def tasks(self): + """Get the tasks of this Tasks. + + :return: The tasks of this Tasks. + :rtype: list[Task] + """ # noqa: E501 + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Set the tasks of this Tasks. + + :param tasks: The tasks of this Tasks. 
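
TaskUpdateRequest above is an all-optional PATCH body, and TaskStatusType earlier in this hunk supplies the two allowed status strings as class constants. A sketch of a partial update (the durations are illustrative):

    from influxdb_client.domain.task_status_type import TaskStatusType
    from influxdb_client.domain.task_update_request import TaskUpdateRequest

    patch = TaskUpdateRequest(status=TaskStatusType.INACTIVE, every="30m", offset="5m")
    # Fields that were never set simply stay None in the resulting dict.
    print(patch.to_dict())
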
+ :type: list[Task] + """ # noqa: E501 + self._tasks = tasks + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Tasks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf.py new file mode 100644 index 0000000..05e2a41 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf.py @@ -0,0 +1,167 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.telegraf_request import TelegrafRequest + + +class Telegraf(TelegrafRequest): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'links': 'object', + 'labels': 'list[Label]', + 'name': 'str', + 'description': 'str', + 'metadata': 'TelegrafRequestMetadata', + 'config': 'str', + 'org_id': 'str' + } + + attribute_map = { + 'id': 'id', + 'links': 'links', + 'labels': 'labels', + 'name': 'name', + 'description': 'description', + 'metadata': 'metadata', + 'config': 'config', + 'org_id': 'orgID' + } + + def __init__(self, id=None, links=None, labels=None, name=None, description=None, metadata=None, config=None, org_id=None): # noqa: E501,D401,D403 + """Telegraf - a model defined in OpenAPI.""" # noqa: E501 + TelegrafRequest.__init__(self, name=name, description=description, metadata=metadata, config=config, org_id=org_id) # noqa: E501 + + self._id = None + self._links = None + self._labels = None + self.discriminator = None + + if id is not None: + self.id = id + if links is not None: + self.links = links + if labels is not None: + self.labels = labels + + @property + def id(self): + """Get the id of this Telegraf. + + :return: The id of this Telegraf. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Telegraf. + + :param id: The id of this Telegraf. 
+ :type: str + """ # noqa: E501 + self._id = id + + @property + def links(self): + """Get the links of this Telegraf. + + :return: The links of this Telegraf. + :rtype: object + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Telegraf. + + :param links: The links of this Telegraf. + :type: object + """ # noqa: E501 + self._links = links + + @property + def labels(self): + """Get the labels of this Telegraf. + + :return: The labels of this Telegraf. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this Telegraf. + + :param labels: The labels of this Telegraf. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Telegraf): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin.py new file mode 100644 index 0000000..a01c2ef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafPlugin(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
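
Telegraf above is the one model in this batch that inherits (from TelegrafRequest): the request fields name, description, metadata, config, and org_id are initialized by the parent constructor, while id, links, and labels are layered on in the subclass. A small sketch, assuming TelegrafRequest (defined elsewhere in this package) exposes the usual generated properties:

    from influxdb_client.domain.telegraf import Telegraf

    tg = Telegraf(id="abc123", name="host-metrics",
                  config='[agent]\n  interval = "10s"', org_id="fedcba9876543210")
    print(tg.name, tg.id)  # name comes from TelegrafRequest.__init__, id from Telegraf itself
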
+ """ + openapi_types = { + 'type': 'str', + 'name': 'str', + 'description': 'str', + 'config': 'dict(str, object)' + } + + attribute_map = { + 'type': 'type', + 'name': 'name', + 'description': 'description', + 'config': 'config' + } + + def __init__(self, type=None, name=None, description=None, config=None): # noqa: E501,D401,D403 + """TelegrafPlugin - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._name = None + self._description = None + self._config = None + self.discriminator = None + + if type is not None: + self.type = type + if name is not None: + self.name = name + if description is not None: + self.description = description + if config is not None: + self.config = config + + @property + def type(self): + """Get the type of this TelegrafPlugin. + + :return: The type of this TelegrafPlugin. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this TelegrafPlugin. + + :param type: The type of this TelegrafPlugin. + :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this TelegrafPlugin. + + :return: The name of this TelegrafPlugin. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TelegrafPlugin. + + :param name: The name of this TelegrafPlugin. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TelegrafPlugin. + + :return: The description of this TelegrafPlugin. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TelegrafPlugin. + + :param description: The description of this TelegrafPlugin. + :type: str + """ # noqa: E501 + self._description = description + + @property + def config(self): + """Get the config of this TelegrafPlugin. + + :return: The config of this TelegrafPlugin. + :rtype: dict(str, object) + """ # noqa: E501 + return self._config + + @config.setter + def config(self, config): + """Set the config of this TelegrafPlugin. + + :param config: The config of this TelegrafPlugin. 
+ :type: dict(str, object) + """ # noqa: E501 + self._config = config + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafPlugin): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request.py new file mode 100644 index 0000000..79287c3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafPluginRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'plugins': 'list[TelegrafPluginRequestPlugins]', + 'metadata': 'TelegrafRequestMetadata', + 'config': 'str', + 'org_id': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'plugins': 'plugins', + 'metadata': 'metadata', + 'config': 'config', + 'org_id': 'orgID' + } + + def __init__(self, name=None, description=None, plugins=None, metadata=None, config=None, org_id=None): # noqa: E501,D401,D403 + """TelegrafPluginRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._plugins = None + self._metadata = None + self._config = None + self._org_id = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if plugins is not None: + self.plugins = plugins + if metadata is not None: + self.metadata = metadata + if config is not None: + self.config = config + if org_id is not None: + self.org_id = org_id + + @property + def name(self): + """Get the name of this TelegrafPluginRequest. + + :return: The name of this TelegrafPluginRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TelegrafPluginRequest. 
+ + :param name: The name of this TelegrafPluginRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TelegrafPluginRequest. + + :return: The description of this TelegrafPluginRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TelegrafPluginRequest. + + :param description: The description of this TelegrafPluginRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def plugins(self): + """Get the plugins of this TelegrafPluginRequest. + + :return: The plugins of this TelegrafPluginRequest. + :rtype: list[TelegrafPluginRequestPlugins] + """ # noqa: E501 + return self._plugins + + @plugins.setter + def plugins(self, plugins): + """Set the plugins of this TelegrafPluginRequest. + + :param plugins: The plugins of this TelegrafPluginRequest. + :type: list[TelegrafPluginRequestPlugins] + """ # noqa: E501 + self._plugins = plugins + + @property + def metadata(self): + """Get the metadata of this TelegrafPluginRequest. + + :return: The metadata of this TelegrafPluginRequest. + :rtype: TelegrafRequestMetadata + """ # noqa: E501 + return self._metadata + + @metadata.setter + def metadata(self, metadata): + """Set the metadata of this TelegrafPluginRequest. + + :param metadata: The metadata of this TelegrafPluginRequest. + :type: TelegrafRequestMetadata + """ # noqa: E501 + self._metadata = metadata + + @property + def config(self): + """Get the config of this TelegrafPluginRequest. + + :return: The config of this TelegrafPluginRequest. + :rtype: str + """ # noqa: E501 + return self._config + + @config.setter + def config(self, config): + """Set the config of this TelegrafPluginRequest. + + :param config: The config of this TelegrafPluginRequest. + :type: str + """ # noqa: E501 + self._config = config + + @property + def org_id(self): + """Get the org_id of this TelegrafPluginRequest. + + :return: The org_id of this TelegrafPluginRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TelegrafPluginRequest. + + :param org_id: The org_id of this TelegrafPluginRequest. 
+ :type: str + """ # noqa: E501 + self._org_id = org_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafPluginRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request_plugins.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request_plugins.py new file mode 100644 index 0000000..1ccf0db --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugin_request_plugins.py @@ -0,0 +1,199 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafPluginRequestPlugins(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'name': 'str', + 'alias': 'str', + 'description': 'str', + 'config': 'dict(str, object)' + } + + attribute_map = { + 'type': 'type', + 'name': 'name', + 'alias': 'alias', + 'description': 'description', + 'config': 'config' + } + + def __init__(self, type=None, name=None, alias=None, description=None, config=None): # noqa: E501,D401,D403 + """TelegrafPluginRequestPlugins - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self._name = None + self._alias = None + self._description = None + self._config = None + self.discriminator = None + + if type is not None: + self.type = type + if name is not None: + self.name = name + if alias is not None: + self.alias = alias + if description is not None: + self.description = description + if config is not None: + self.config = config + + @property + def type(self): + """Get the type of this TelegrafPluginRequestPlugins. + + :return: The type of this TelegrafPluginRequestPlugins. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this TelegrafPluginRequestPlugins. + + :param type: The type of this TelegrafPluginRequestPlugins. 
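
TelegrafPluginRequest above assembles a Telegraf configuration from structured plugin entries; each entry is a TelegrafPluginRequestPlugins object, whose definition continues just below. A minimal sketch; the plugin type "input" and the name "cpu" are assumptions modeled on common Telegraf plugins, not values taken from this file:

    from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest
    from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins

    cpu = TelegrafPluginRequestPlugins(type="input", name="cpu", config={})  # type value assumed
    req = TelegrafPluginRequest(name="monitoring", org_id="fedcba9876543210", plugins=[cpu])
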
+ :type: str + """ # noqa: E501 + self._type = type + + @property + def name(self): + """Get the name of this TelegrafPluginRequestPlugins. + + :return: The name of this TelegrafPluginRequestPlugins. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TelegrafPluginRequestPlugins. + + :param name: The name of this TelegrafPluginRequestPlugins. + :type: str + """ # noqa: E501 + self._name = name + + @property + def alias(self): + """Get the alias of this TelegrafPluginRequestPlugins. + + :return: The alias of this TelegrafPluginRequestPlugins. + :rtype: str + """ # noqa: E501 + return self._alias + + @alias.setter + def alias(self, alias): + """Set the alias of this TelegrafPluginRequestPlugins. + + :param alias: The alias of this TelegrafPluginRequestPlugins. + :type: str + """ # noqa: E501 + self._alias = alias + + @property + def description(self): + """Get the description of this TelegrafPluginRequestPlugins. + + :return: The description of this TelegrafPluginRequestPlugins. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TelegrafPluginRequestPlugins. + + :param description: The description of this TelegrafPluginRequestPlugins. + :type: str + """ # noqa: E501 + self._description = description + + @property + def config(self): + """Get the config of this TelegrafPluginRequestPlugins. + + :return: The config of this TelegrafPluginRequestPlugins. + :rtype: dict(str, object) + """ # noqa: E501 + return self._config + + @config.setter + def config(self, config): + """Set the config of this TelegrafPluginRequestPlugins. + + :param config: The config of this TelegrafPluginRequestPlugins. + :type: dict(str, object) + """ # noqa: E501 + self._config = config + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafPluginRequestPlugins): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugins.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugins.py new file mode 100644 index 0000000..80a1842 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_plugins.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafPlugins(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'version': 'str', + 'os': 'str', + 'plugins': 'list[TelegrafPlugin]' + } + + attribute_map = { + 'version': 'version', + 'os': 'os', + 'plugins': 'plugins' + } + + def __init__(self, version=None, os=None, plugins=None): # noqa: E501,D401,D403 + """TelegrafPlugins - a model defined in OpenAPI.""" # noqa: E501 + self._version = None + self._os = None + self._plugins = None + self.discriminator = None + + if version is not None: + self.version = version + if os is not None: + self.os = os + if plugins is not None: + self.plugins = plugins + + @property + def version(self): + """Get the version of this TelegrafPlugins. + + :return: The version of this TelegrafPlugins. + :rtype: str + """ # noqa: E501 + return self._version + + @version.setter + def version(self, version): + """Set the version of this TelegrafPlugins. + + :param version: The version of this TelegrafPlugins. + :type: str + """ # noqa: E501 + self._version = version + + @property + def os(self): + """Get the os of this TelegrafPlugins. + + :return: The os of this TelegrafPlugins. + :rtype: str + """ # noqa: E501 + return self._os + + @os.setter + def os(self, os): + """Set the os of this TelegrafPlugins. + + :param os: The os of this TelegrafPlugins. + :type: str + """ # noqa: E501 + self._os = os + + @property + def plugins(self): + """Get the plugins of this TelegrafPlugins. + + :return: The plugins of this TelegrafPlugins. + :rtype: list[TelegrafPlugin] + """ # noqa: E501 + return self._plugins + + @plugins.setter + def plugins(self, plugins): + """Set the plugins of this TelegrafPlugins. + + :param plugins: The plugins of this TelegrafPlugins. 
+ :type: list[TelegrafPlugin] + """ # noqa: E501 + self._plugins = plugins + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafPlugins): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request.py new file mode 100644 index 0000000..898deb8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request.py @@ -0,0 +1,199 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'metadata': 'TelegrafRequestMetadata', + 'config': 'str', + 'org_id': 'str' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'metadata': 'metadata', + 'config': 'config', + 'org_id': 'orgID' + } + + def __init__(self, name=None, description=None, metadata=None, config=None, org_id=None): # noqa: E501,D401,D403 + """TelegrafRequest - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._metadata = None + self._config = None + self._org_id = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if metadata is not None: + self.metadata = metadata + if config is not None: + self.config = config + if org_id is not None: + self.org_id = org_id + + @property + def name(self): + """Get the name of this TelegrafRequest. + + :return: The name of this TelegrafRequest. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TelegrafRequest. + + :param name: The name of this TelegrafRequest. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TelegrafRequest. 
+ + :return: The description of this TelegrafRequest. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TelegrafRequest. + + :param description: The description of this TelegrafRequest. + :type: str + """ # noqa: E501 + self._description = description + + @property + def metadata(self): + """Get the metadata of this TelegrafRequest. + + :return: The metadata of this TelegrafRequest. + :rtype: TelegrafRequestMetadata + """ # noqa: E501 + return self._metadata + + @metadata.setter + def metadata(self, metadata): + """Set the metadata of this TelegrafRequest. + + :param metadata: The metadata of this TelegrafRequest. + :type: TelegrafRequestMetadata + """ # noqa: E501 + self._metadata = metadata + + @property + def config(self): + """Get the config of this TelegrafRequest. + + :return: The config of this TelegrafRequest. + :rtype: str + """ # noqa: E501 + return self._config + + @config.setter + def config(self, config): + """Set the config of this TelegrafRequest. + + :param config: The config of this TelegrafRequest. + :type: str + """ # noqa: E501 + self._config = config + + @property + def org_id(self): + """Get the org_id of this TelegrafRequest. + + :return: The org_id of this TelegrafRequest. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TelegrafRequest. + + :param org_id: The org_id of this TelegrafRequest. + :type: str + """ # noqa: E501 + self._org_id = org_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request_metadata.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request_metadata.py new file mode 100644 index 0000000..e7a9f81 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegraf_request_metadata.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TelegrafRequestMetadata(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
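+
+ A hypothetical usage sketch (the bucket name below is illustrative,
+ not part of the generated code)::
+
+     from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata
+
+     # Telegraf output is scoped to the listed buckets.
+     metadata = TelegrafRequestMetadata(buckets=["example-bucket"])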
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'buckets': 'list[str]' + } + + attribute_map = { + 'buckets': 'buckets' + } + + def __init__(self, buckets=None): # noqa: E501,D401,D403 + """TelegrafRequestMetadata - a model defined in OpenAPI.""" # noqa: E501 + self._buckets = None + self.discriminator = None + + if buckets is not None: + self.buckets = buckets + + @property + def buckets(self): + """Get the buckets of this TelegrafRequestMetadata. + + :return: The buckets of this TelegrafRequestMetadata. + :rtype: list[str] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this TelegrafRequestMetadata. + + :param buckets: The buckets of this TelegrafRequestMetadata. + :type: list[str] + """ # noqa: E501 + self._buckets = buckets + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegrafRequestMetadata): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegrafs.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegrafs.py new file mode 100644 index 0000000..17d7760 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegrafs.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Telegrafs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
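+
+ A hedged usage sketch (values are illustrative)::
+
+     from influxdb_client.domain.telegrafs import Telegrafs
+
+     # Wrap the list of Telegraf configurations returned by the API.
+     telegrafs = Telegrafs(configurations=[])
+     print(telegrafs.to_dict())  # -> {'configurations': []}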
+ """ + openapi_types = { + 'configurations': 'list[Telegraf]' + } + + attribute_map = { + 'configurations': 'configurations' + } + + def __init__(self, configurations=None): # noqa: E501,D401,D403 + """Telegrafs - a model defined in OpenAPI.""" # noqa: E501 + self._configurations = None + self.discriminator = None + + if configurations is not None: + self.configurations = configurations + + @property + def configurations(self): + """Get the configurations of this Telegrafs. + + :return: The configurations of this Telegrafs. + :rtype: list[Telegraf] + """ # noqa: E501 + return self._configurations + + @configurations.setter + def configurations(self, configurations): + """Set the configurations of this Telegrafs. + + :param configurations: The configurations of this Telegrafs. + :type: list[Telegraf] + """ # noqa: E501 + self._configurations = configurations + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Telegrafs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_endpoint.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_endpoint.py new file mode 100644 index 0000000..44c22df --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_endpoint.py @@ -0,0 +1,166 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + + +class TelegramNotificationEndpoint(NotificationEndpointDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
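+
+ A minimal construction sketch; the token and channel values below are
+ placeholders, not real credentials::
+
+     from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint
+
+     # `token` and `channel` are required; passing None raises ValueError.
+     endpoint = TelegramNotificationEndpoint(
+         name="example-endpoint",
+         token="123456:ABC-placeholder",
+         channel="-1001234567890",
+     )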
+ """
+ openapi_types = {
+ 'token': 'str',
+ 'channel': 'str',
+ 'id': 'str',
+ 'org_id': 'str',
+ 'user_id': 'str',
+ 'created_at': 'datetime',
+ 'updated_at': 'datetime',
+ 'description': 'str',
+ 'name': 'str',
+ 'status': 'str',
+ 'labels': 'list[Label]',
+ 'links': 'NotificationEndpointBaseLinks',
+ 'type': 'NotificationEndpointType'
+ }
+
+ attribute_map = {
+ 'token': 'token',
+ 'channel': 'channel',
+ 'id': 'id',
+ 'org_id': 'orgID',
+ 'user_id': 'userID',
+ 'created_at': 'createdAt',
+ 'updated_at': 'updatedAt',
+ 'description': 'description',
+ 'name': 'name',
+ 'status': 'status',
+ 'labels': 'labels',
+ 'links': 'links',
+ 'type': 'type'
+ }
+
+ def __init__(self, token=None, channel=None, id=None, org_id=None, user_id=None, created_at=None, updated_at=None, description=None, name=None, status='active', labels=None, links=None, type="telegram"): # noqa: E501,D401,D403
+ """TelegramNotificationEndpoint - a model defined in OpenAPI.""" # noqa: E501
+ NotificationEndpointDiscriminator.__init__(self, id=id, org_id=org_id, user_id=user_id, created_at=created_at, updated_at=updated_at, description=description, name=name, status=status, labels=labels, links=links, type=type) # noqa: E501
+
+ self._token = None
+ self._channel = None
+ self.discriminator = None
+
+ self.token = token
+ self.channel = channel
+
+ @property
+ def token(self):
+ """Get the token of this TelegramNotificationEndpoint.
+
+ Specifies the Telegram bot token. See https://core.telegram.org/bots#creating-a-new-bot .
+
+ :return: The token of this TelegramNotificationEndpoint.
+ :rtype: str
+ """ # noqa: E501
+ return self._token
+
+ @token.setter
+ def token(self, token):
+ """Set the token of this TelegramNotificationEndpoint.
+
+ Specifies the Telegram bot token. See https://core.telegram.org/bots#creating-a-new-bot .
+
+ :param token: The token of this TelegramNotificationEndpoint.
+ :type: str
+ """ # noqa: E501
+ if token is None:
+ raise ValueError("Invalid value for `token`, must not be `None`") # noqa: E501
+ self._token = token
+
+ @property
+ def channel(self):
+ """Get the channel of this TelegramNotificationEndpoint.
+
+ The ID of the Telegram channel; a chat_id in https://core.telegram.org/bots/api#sendmessage .
+
+ :return: The channel of this TelegramNotificationEndpoint.
+ :rtype: str
+ """ # noqa: E501
+ return self._channel
+
+ @channel.setter
+ def channel(self, channel):
+ """Set the channel of this TelegramNotificationEndpoint.
+
+ The ID of the Telegram channel; a chat_id in https://core.telegram.org/bots/api#sendmessage .
+
+ :param channel: The channel of this TelegramNotificationEndpoint.
+ :type: str + """ # noqa: E501 + if channel is None: + raise ValueError("Invalid value for `channel`, must not be `None`") # noqa: E501 + self._channel = channel + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegramNotificationEndpoint): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule.py new file mode 100644 index 0000000..2238909 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase + + +class TelegramNotificationRule(TelegramNotificationRuleBase): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
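+
+ A hedged construction sketch (the IDs and message template below are
+ placeholders)::
+
+     from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule
+
+     # `type` defaults to "telegram"; `message_template` is required by the base class.
+     rule = TelegramNotificationRule(
+         name="example-rule",
+         every="10m",
+         endpoint_id="0000000000000000",
+         message_template="${ r._check_name } is ${ r._level }",
+     )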
+ """ + openapi_types = { + 'type': 'str', + 'message_template': 'str', + 'parse_mode': 'str', + 'disable_web_page_preview': 'bool', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'message_template': 'messageTemplate', + 'parse_mode': 'parseMode', + 'disable_web_page_preview': 'disableWebPagePreview', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="telegram", message_template=None, parse_mode=None, disable_web_page_preview=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """TelegramNotificationRule - a model defined in OpenAPI.""" # noqa: E501 + TelegramNotificationRuleBase.__init__(self, type=type, message_template=message_template, parse_mode=parse_mode, disable_web_page_preview=disable_web_page_preview, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501 + self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): 
+ """Return true if both objects are equal.""" + if not isinstance(other, TelegramNotificationRule): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule_base.py new file mode 100644 index 0000000..8d3aeb1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/telegram_notification_rule_base.py @@ -0,0 +1,244 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + + +class TelegramNotificationRuleBase(NotificationRuleDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'message_template': 'str', + 'parse_mode': 'str', + 'disable_web_page_preview': 'bool', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'id': 'str', + 'endpoint_id': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'status': 'TaskStatusType', + 'name': 'str', + 'sleep_until': 'str', + 'every': 'str', + 'offset': 'str', + 'runbook_link': 'str', + 'limit_every': 'int', + 'limit': 'int', + 'tag_rules': 'list[TagRule]', + 'description': 'str', + 'status_rules': 'list[StatusRule]', + 'labels': 'list[Label]', + 'links': 'NotificationRuleBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'message_template': 'messageTemplate', + 'parse_mode': 'parseMode', + 'disable_web_page_preview': 'disableWebPagePreview', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'id': 'id', + 'endpoint_id': 'endpointID', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'status': 'status', + 'name': 'name', + 'sleep_until': 'sleepUntil', + 'every': 'every', + 'offset': 'offset', + 'runbook_link': 'runbookLink', + 'limit_every': 'limitEvery', + 'limit': 'limit', + 'tag_rules': 'tagRules', + 'description': 'description', + 'status_rules': 'statusRules', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type=None, message_template=None, parse_mode=None, disable_web_page_preview=None, latest_completed=None, last_run_status=None, last_run_error=None, id=None, endpoint_id=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, status=None, name=None, sleep_until=None, every=None, offset=None, runbook_link=None, limit_every=None, limit=None, tag_rules=None, description=None, status_rules=None, labels=None, links=None): # noqa: E501,D401,D403 + """TelegramNotificationRuleBase - a model defined in 
OpenAPI.""" # noqa: E501
+ NotificationRuleDiscriminator.__init__(self, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, id=id, endpoint_id=endpoint_id, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, status=status, name=name, sleep_until=sleep_until, every=every, offset=offset, runbook_link=runbook_link, limit_every=limit_every, limit=limit, tag_rules=tag_rules, description=description, status_rules=status_rules, labels=labels, links=links) # noqa: E501
+
+ self._type = None
+ self._message_template = None
+ self._parse_mode = None
+ self._disable_web_page_preview = None
+ self.discriminator = None
+
+ self.type = type
+ self.message_template = message_template
+ if parse_mode is not None:
+ self.parse_mode = parse_mode
+ if disable_web_page_preview is not None:
+ self.disable_web_page_preview = disable_web_page_preview
+
+ @property
+ def type(self):
+ """Get the type of this TelegramNotificationRuleBase.
+
+ The discriminator that distinguishes this rule from other types of notification rules; always "telegram".
+
+ :return: The type of this TelegramNotificationRuleBase.
+ :rtype: str
+ """ # noqa: E501
+ return self._type
+
+ @type.setter
+ def type(self, type):
+ """Set the type of this TelegramNotificationRuleBase.
+
+ The discriminator that distinguishes this rule from other types of notification rules; always "telegram".
+
+ :param type: The type of this TelegramNotificationRuleBase.
+ :type: str
+ """ # noqa: E501
+ if type is None:
+ raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
+ self._type = type
+
+ @property
+ def message_template(self):
+ """Get the message_template of this TelegramNotificationRuleBase.
+
+ The message template as a Flux interpolated string.
+
+ :return: The message_template of this TelegramNotificationRuleBase.
+ :rtype: str
+ """ # noqa: E501
+ return self._message_template
+
+ @message_template.setter
+ def message_template(self, message_template):
+ """Set the message_template of this TelegramNotificationRuleBase.
+
+ The message template as a Flux interpolated string.
+
+ :param message_template: The message_template of this TelegramNotificationRuleBase.
+ :type: str
+ """ # noqa: E501
+ if message_template is None:
+ raise ValueError("Invalid value for `message_template`, must not be `None`") # noqa: E501
+ self._message_template = message_template
+
+ @property
+ def parse_mode(self):
+ """Get the parse_mode of this TelegramNotificationRuleBase.
+
+ Parse mode of the message text per https://core.telegram.org/bots/api#formatting-options. Defaults to "MarkdownV2".
+
+ :return: The parse_mode of this TelegramNotificationRuleBase.
+ :rtype: str
+ """ # noqa: E501
+ return self._parse_mode
+
+ @parse_mode.setter
+ def parse_mode(self, parse_mode):
+ """Set the parse_mode of this TelegramNotificationRuleBase.
+
+ Parse mode of the message text per https://core.telegram.org/bots/api#formatting-options. Defaults to "MarkdownV2".
+
+ :param parse_mode: The parse_mode of this TelegramNotificationRuleBase.
+ :type: str
+ """ # noqa: E501
+ self._parse_mode = parse_mode
+
+ @property
+ def disable_web_page_preview(self):
+ """Get the disable_web_page_preview of this TelegramNotificationRuleBase.
+
+ Disables preview of web links in the sent messages when "true". Defaults to "false".
+
+ :return: The disable_web_page_preview of this TelegramNotificationRuleBase.
+ :rtype: bool + """ # noqa: E501 + return self._disable_web_page_preview + + @disable_web_page_preview.setter + def disable_web_page_preview(self, disable_web_page_preview): + """Set the disable_web_page_preview of this TelegramNotificationRuleBase. + + Disables preview of web links in the sent messages when "true". Defaults to "false". + + :param disable_web_page_preview: The disable_web_page_preview of this TelegramNotificationRuleBase. + :type: bool + """ # noqa: E501 + self._disable_web_page_preview = disable_web_page_preview + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TelegramNotificationRuleBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply.py new file mode 100644 index 0000000..6f8d5e4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply.py @@ -0,0 +1,323 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateApply(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
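+
+ A dry-run sketch (the org ID and template URL are placeholders; see
+ the property docstrings below for full parameter semantics)::
+
+     from influxdb_client.domain.template_apply import TemplateApply
+     from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes
+
+     # Validate a remote template against an organization without installing it.
+     apply_request = TemplateApply(
+         dry_run=True,
+         org_id="0000000000000000",
+         remotes=[TemplateApplyRemotes(url="https://example.com/template.yml")],
+     )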
+ """ + openapi_types = { + 'dry_run': 'bool', + 'org_id': 'str', + 'stack_id': 'str', + 'template': 'TemplateApplyTemplate', + 'templates': 'list[TemplateApplyTemplate]', + 'env_refs': 'dict(str, object)', + 'secrets': 'dict(str, str)', + 'remotes': 'list[TemplateApplyRemotes]', + 'actions': 'list[object]' + } + + attribute_map = { + 'dry_run': 'dryRun', + 'org_id': 'orgID', + 'stack_id': 'stackID', + 'template': 'template', + 'templates': 'templates', + 'env_refs': 'envRefs', + 'secrets': 'secrets', + 'remotes': 'remotes', + 'actions': 'actions' + } + + def __init__(self, dry_run=None, org_id=None, stack_id=None, template=None, templates=None, env_refs=None, secrets=None, remotes=None, actions=None): # noqa: E501,D401,D403 + """TemplateApply - a model defined in OpenAPI.""" # noqa: E501 + self._dry_run = None + self._org_id = None + self._stack_id = None + self._template = None + self._templates = None + self._env_refs = None + self._secrets = None + self._remotes = None + self._actions = None + self.discriminator = None + + if dry_run is not None: + self.dry_run = dry_run + if org_id is not None: + self.org_id = org_id + if stack_id is not None: + self.stack_id = stack_id + if template is not None: + self.template = template + if templates is not None: + self.templates = templates + if env_refs is not None: + self.env_refs = env_refs + if secrets is not None: + self.secrets = secrets + if remotes is not None: + self.remotes = remotes + if actions is not None: + self.actions = actions + + @property + def dry_run(self): + """Get the dry_run of this TemplateApply. + + Only applies a dry run of the templates passed in the request. - Validates the template and generates a resource diff and summary. - Doesn't install templates or make changes to the InfluxDB instance. + + :return: The dry_run of this TemplateApply. + :rtype: bool + """ # noqa: E501 + return self._dry_run + + @dry_run.setter + def dry_run(self, dry_run): + """Set the dry_run of this TemplateApply. + + Only applies a dry run of the templates passed in the request. - Validates the template and generates a resource diff and summary. - Doesn't install templates or make changes to the InfluxDB instance. + + :param dry_run: The dry_run of this TemplateApply. + :type: bool + """ # noqa: E501 + self._dry_run = dry_run + + @property + def org_id(self): + """Get the org_id of this TemplateApply. + + Organization ID. InfluxDB applies templates to this organization. The organization owns all resources created by the template. To find your organization, see how to [view organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/). + + :return: The org_id of this TemplateApply. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateApply. + + Organization ID. InfluxDB applies templates to this organization. The organization owns all resources created by the template. To find your organization, see how to [view organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/). + + :param org_id: The org_id of this TemplateApply. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def stack_id(self): + """Get the stack_id of this TemplateApply. + + ID of the stack to update. To apply templates to an existing stack in the organization, use the `stackID` parameter. If you apply templates without providing a stack ID, InfluxDB initializes a new stack with all new resources. 
To find a stack ID, use the InfluxDB [`/api/v2/stacks` API endpoint](#operation/ListStacks) to list stacks. #### Related guides - [Stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) - [View stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/view/) + + :return: The stack_id of this TemplateApply. + :rtype: str + """ # noqa: E501 + return self._stack_id + + @stack_id.setter + def stack_id(self, stack_id): + """Set the stack_id of this TemplateApply. + + ID of the stack to update. To apply templates to an existing stack in the organization, use the `stackID` parameter. If you apply templates without providing a stack ID, InfluxDB initializes a new stack with all new resources. To find a stack ID, use the InfluxDB [`/api/v2/stacks` API endpoint](#operation/ListStacks) to list stacks. #### Related guides - [Stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) - [View stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/view/) + + :param stack_id: The stack_id of this TemplateApply. + :type: str + """ # noqa: E501 + self._stack_id = stack_id + + @property + def template(self): + """Get the template of this TemplateApply. + + :return: The template of this TemplateApply. + :rtype: TemplateApplyTemplate + """ # noqa: E501 + return self._template + + @template.setter + def template(self, template): + """Set the template of this TemplateApply. + + :param template: The template of this TemplateApply. + :type: TemplateApplyTemplate + """ # noqa: E501 + self._template = template + + @property + def templates(self): + """Get the templates of this TemplateApply. + + A list of template objects to apply. A template object has a `contents` property with an array of InfluxDB resource configurations. Use the `templates` parameter to apply multiple template objects. If you use `templates`, you can't use the `template` parameter. + + :return: The templates of this TemplateApply. + :rtype: list[TemplateApplyTemplate] + """ # noqa: E501 + return self._templates + + @templates.setter + def templates(self, templates): + """Set the templates of this TemplateApply. + + A list of template objects to apply. A template object has a `contents` property with an array of InfluxDB resource configurations. Use the `templates` parameter to apply multiple template objects. If you use `templates`, you can't use the `template` parameter. + + :param templates: The templates of this TemplateApply. + :type: list[TemplateApplyTemplate] + """ # noqa: E501 + self._templates = templates + + @property + def env_refs(self): + """Get the env_refs of this TemplateApply. + + An object with key-value pairs that map to **environment references** in templates. Environment references in templates are `envRef` objects with an `envRef.key` property. To substitute a custom environment reference value when applying templates, pass `envRefs` with the `envRef.key` and the value. When you apply a template, InfluxDB replaces `envRef` objects in the template with the values that you provide in the `envRefs` parameter. For more examples, see how to [define environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#define-environment-references). 
The following template fields may use environment references: - `metadata.name` - `spec.endpointName` - `spec.associations.name` For more information about including environment references in template fields, see how to [include user-definable resource names](https://docs.influxdata.com/influxdb/latest/influxdb-templates/create/#include-user-definable-resource-names). + + :return: The env_refs of this TemplateApply. + :rtype: dict(str, object) + """ # noqa: E501 + return self._env_refs + + @env_refs.setter + def env_refs(self, env_refs): + """Set the env_refs of this TemplateApply. + + An object with key-value pairs that map to **environment references** in templates. Environment references in templates are `envRef` objects with an `envRef.key` property. To substitute a custom environment reference value when applying templates, pass `envRefs` with the `envRef.key` and the value. When you apply a template, InfluxDB replaces `envRef` objects in the template with the values that you provide in the `envRefs` parameter. For more examples, see how to [define environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#define-environment-references). The following template fields may use environment references: - `metadata.name` - `spec.endpointName` - `spec.associations.name` For more information about including environment references in template fields, see how to [include user-definable resource names](https://docs.influxdata.com/influxdb/latest/influxdb-templates/create/#include-user-definable-resource-names). + + :param env_refs: The env_refs of this TemplateApply. + :type: dict(str, object) + """ # noqa: E501 + self._env_refs = env_refs + + @property + def secrets(self): + """Get the secrets of this TemplateApply. + + An object with key-value pairs that map to **secrets** in queries. Queries may reference secrets stored in InfluxDB--for example, the following Flux script retrieves `POSTGRES_USERNAME` and `POSTGRES_PASSWORD` secrets and then uses them to connect to a PostgreSQL database: ```js import "sql" import "influxdata/influxdb/secrets" username = secrets.get(key: "POSTGRES_USERNAME") password = secrets.get(key: "POSTGRES_PASSWORD") sql.from( driverName: "postgres", dataSourceName: "postgresql://${username}:${password}@localhost:5432", query: "SELECT * FROM example_table", ) ``` To define secret values in your `/api/v2/templates/apply` request, pass the `secrets` parameter with key-value pairs--for example: ```json { ... "secrets": { "POSTGRES_USERNAME": "pguser", "POSTGRES_PASSWORD": "foo" } ... } ``` InfluxDB stores the key-value pairs as secrets that you can access with `secrets.get()`. Once stored, you can't view secret values in InfluxDB. #### Related guides - [How to pass secrets when installing a template](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#pass-secrets-when-installing-a-template) + + :return: The secrets of this TemplateApply. + :rtype: dict(str, str) + """ # noqa: E501 + return self._secrets + + @secrets.setter + def secrets(self, secrets): + """Set the secrets of this TemplateApply. + + An object with key-value pairs that map to **secrets** in queries. 
Queries may reference secrets stored in InfluxDB--for example, the following Flux script retrieves `POSTGRES_USERNAME` and `POSTGRES_PASSWORD` secrets and then uses them to connect to a PostgreSQL database: ```js import "sql" import "influxdata/influxdb/secrets" username = secrets.get(key: "POSTGRES_USERNAME") password = secrets.get(key: "POSTGRES_PASSWORD") sql.from( driverName: "postgres", dataSourceName: "postgresql://${username}:${password}@localhost:5432", query: "SELECT * FROM example_table", ) ``` To define secret values in your `/api/v2/templates/apply` request, pass the `secrets` parameter with key-value pairs--for example: ```json { ... "secrets": { "POSTGRES_USERNAME": "pguser", "POSTGRES_PASSWORD": "foo" } ... } ``` InfluxDB stores the key-value pairs as secrets that you can access with `secrets.get()`. Once stored, you can't view secret values in InfluxDB. #### Related guides - [How to pass secrets when installing a template](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#pass-secrets-when-installing-a-template) + + :param secrets: The secrets of this TemplateApply. + :type: dict(str, str) + """ # noqa: E501 + self._secrets = secrets + + @property + def remotes(self): + """Get the remotes of this TemplateApply. + + A list of URLs for template files. To apply a template manifest file located at a URL, pass `remotes` with an array that contains the URL. + + :return: The remotes of this TemplateApply. + :rtype: list[TemplateApplyRemotes] + """ # noqa: E501 + return self._remotes + + @remotes.setter + def remotes(self, remotes): + """Set the remotes of this TemplateApply. + + A list of URLs for template files. To apply a template manifest file located at a URL, pass `remotes` with an array that contains the URL. + + :param remotes: The remotes of this TemplateApply. + :type: list[TemplateApplyRemotes] + """ # noqa: E501 + self._remotes = remotes + + @property + def actions(self): + """Get the actions of this TemplateApply. + + A list of `action` objects. Actions let you customize how InfluxDB applies templates in the request. You can use the following actions to prevent creating or updating resources: - A `skipKind` action skips template resources of a specified `kind`. - A `skipResource` action skips template resources with a specified `metadata.name` and `kind`. + + :return: The actions of this TemplateApply. + :rtype: list[object] + """ # noqa: E501 + return self._actions + + @actions.setter + def actions(self, actions): + """Set the actions of this TemplateApply. + + A list of `action` objects. Actions let you customize how InfluxDB applies templates in the request. You can use the following actions to prevent creating or updating resources: - A `skipKind` action skips template resources of a specified `kind`. - A `skipResource` action skips template resources with a specified `metadata.name` and `kind`. + + :param actions: The actions of this TemplateApply. 
+ :type: list[object] + """ # noqa: E501 + self._actions = actions + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateApply): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_remotes.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_remotes.py new file mode 100644 index 0000000..bdb835b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_remotes.py @@ -0,0 +1,131 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateApplyRemotes(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'url': 'str', + 'content_type': 'str' + } + + attribute_map = { + 'url': 'url', + 'content_type': 'contentType' + } + + def __init__(self, url=None, content_type=None): # noqa: E501,D401,D403 + """TemplateApplyRemotes - a model defined in OpenAPI.""" # noqa: E501 + self._url = None + self._content_type = None + self.discriminator = None + + self.url = url + if content_type is not None: + self.content_type = content_type + + @property + def url(self): + """Get the url of this TemplateApplyRemotes. + + :return: The url of this TemplateApplyRemotes. + :rtype: str + """ # noqa: E501 + return self._url + + @url.setter + def url(self, url): + """Set the url of this TemplateApplyRemotes. + + :param url: The url of this TemplateApplyRemotes. + :type: str + """ # noqa: E501 + if url is None: + raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + self._url = url + + @property + def content_type(self): + """Get the content_type of this TemplateApplyRemotes. + + :return: The content_type of this TemplateApplyRemotes. + :rtype: str + """ # noqa: E501 + return self._content_type + + @content_type.setter + def content_type(self, content_type): + """Set the content_type of this TemplateApplyRemotes. + + :param content_type: The content_type of this TemplateApplyRemotes. 
+ :type: str + """ # noqa: E501 + self._content_type = content_type + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateApplyRemotes): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_template.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_template.py new file mode 100644 index 0000000..0fee8e9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_apply_template.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateApplyTemplate(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'content_type': 'str', + 'sources': 'list[str]', + 'contents': 'list[object]' + } + + attribute_map = { + 'content_type': 'contentType', + 'sources': 'sources', + 'contents': 'contents' + } + + def __init__(self, content_type=None, sources=None, contents=None): # noqa: E501,D401,D403 + """TemplateApplyTemplate - a model defined in OpenAPI.""" # noqa: E501 + self._content_type = None + self._sources = None + self._contents = None + self.discriminator = None + + if content_type is not None: + self.content_type = content_type + if sources is not None: + self.sources = sources + if contents is not None: + self.contents = contents + + @property + def content_type(self): + """Get the content_type of this TemplateApplyTemplate. + + :return: The content_type of this TemplateApplyTemplate. + :rtype: str + """ # noqa: E501 + return self._content_type + + @content_type.setter + def content_type(self, content_type): + """Set the content_type of this TemplateApplyTemplate. + + :param content_type: The content_type of this TemplateApplyTemplate. + :type: str + """ # noqa: E501 + self._content_type = content_type + + @property + def sources(self): + """Get the sources of this TemplateApplyTemplate. + + :return: The sources of this TemplateApplyTemplate. 
+ :rtype: list[str] + """ # noqa: E501 + return self._sources + + @sources.setter + def sources(self, sources): + """Set the sources of this TemplateApplyTemplate. + + :param sources: The sources of this TemplateApplyTemplate. + :type: list[str] + """ # noqa: E501 + self._sources = sources + + @property + def contents(self): + """Get the contents of this TemplateApplyTemplate. + + :return: The contents of this TemplateApplyTemplate. + :rtype: list[object] + """ # noqa: E501 + return self._contents + + @contents.setter + def contents(self, contents): + """Set the contents of this TemplateApplyTemplate. + + :param contents: The contents of this TemplateApplyTemplate. + :type: list[object] + """ # noqa: E501 + self._contents = contents + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateApplyTemplate): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_chart.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_chart.py new file mode 100644 index 0000000..7ae7b35 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_chart.py @@ -0,0 +1,199 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateChart(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
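+
+ Usage sketch (illustrative, not generated code; the coordinates and
+ sizes below are arbitrary, and `properties` is left unset):
+
+ from influxdb_client.domain.template_chart import TemplateChart
+
+ chart = TemplateChart(x_pos=0, y_pos=0, width=4, height=3)
+ chart.to_dict() # {'x_pos': 0, 'y_pos': 0, 'height': 3, 'width': 4, 'properties': None}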
+ """ + openapi_types = { + 'x_pos': 'int', + 'y_pos': 'int', + 'height': 'int', + 'width': 'int', + 'properties': 'ViewProperties' + } + + attribute_map = { + 'x_pos': 'xPos', + 'y_pos': 'yPos', + 'height': 'height', + 'width': 'width', + 'properties': 'properties' + } + + def __init__(self, x_pos=None, y_pos=None, height=None, width=None, properties=None): # noqa: E501,D401,D403 + """TemplateChart - a model defined in OpenAPI.""" # noqa: E501 + self._x_pos = None + self._y_pos = None + self._height = None + self._width = None + self._properties = None + self.discriminator = None + + if x_pos is not None: + self.x_pos = x_pos + if y_pos is not None: + self.y_pos = y_pos + if height is not None: + self.height = height + if width is not None: + self.width = width + if properties is not None: + self.properties = properties + + @property + def x_pos(self): + """Get the x_pos of this TemplateChart. + + :return: The x_pos of this TemplateChart. + :rtype: int + """ # noqa: E501 + return self._x_pos + + @x_pos.setter + def x_pos(self, x_pos): + """Set the x_pos of this TemplateChart. + + :param x_pos: The x_pos of this TemplateChart. + :type: int + """ # noqa: E501 + self._x_pos = x_pos + + @property + def y_pos(self): + """Get the y_pos of this TemplateChart. + + :return: The y_pos of this TemplateChart. + :rtype: int + """ # noqa: E501 + return self._y_pos + + @y_pos.setter + def y_pos(self, y_pos): + """Set the y_pos of this TemplateChart. + + :param y_pos: The y_pos of this TemplateChart. + :type: int + """ # noqa: E501 + self._y_pos = y_pos + + @property + def height(self): + """Get the height of this TemplateChart. + + :return: The height of this TemplateChart. + :rtype: int + """ # noqa: E501 + return self._height + + @height.setter + def height(self, height): + """Set the height of this TemplateChart. + + :param height: The height of this TemplateChart. + :type: int + """ # noqa: E501 + self._height = height + + @property + def width(self): + """Get the width of this TemplateChart. + + :return: The width of this TemplateChart. + :rtype: int + """ # noqa: E501 + return self._width + + @width.setter + def width(self, width): + """Set the width of this TemplateChart. + + :param width: The width of this TemplateChart. + :type: int + """ # noqa: E501 + self._width = width + + @property + def properties(self): + """Get the properties of this TemplateChart. + + :return: The properties of this TemplateChart. + :rtype: ViewProperties + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this TemplateChart. + + :param properties: The properties of this TemplateChart. 
+ :type: ViewProperties + """ # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateChart): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id.py new file mode 100644 index 0000000..7b5f212 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateExportByID(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'stack_id': 'str', + 'org_ids': 'list[TemplateExportByIDOrgIDs]', + 'resources': 'list[TemplateExportByIDResources]' + } + + attribute_map = { + 'stack_id': 'stackID', + 'org_ids': 'orgIDs', + 'resources': 'resources' + } + + def __init__(self, stack_id=None, org_ids=None, resources=None): # noqa: E501,D401,D403 + """TemplateExportByID - a model defined in OpenAPI.""" # noqa: E501 + self._stack_id = None + self._org_ids = None + self._resources = None + self.discriminator = None + + if stack_id is not None: + self.stack_id = stack_id + if org_ids is not None: + self.org_ids = org_ids + if resources is not None: + self.resources = resources + + @property + def stack_id(self): + """Get the stack_id of this TemplateExportByID. + + :return: The stack_id of this TemplateExportByID. + :rtype: str + """ # noqa: E501 + return self._stack_id + + @stack_id.setter + def stack_id(self, stack_id): + """Set the stack_id of this TemplateExportByID. + + :param stack_id: The stack_id of this TemplateExportByID. + :type: str + """ # noqa: E501 + self._stack_id = stack_id + + @property + def org_ids(self): + """Get the org_ids of this TemplateExportByID. + + :return: The org_ids of this TemplateExportByID. 
+ :rtype: list[TemplateExportByIDOrgIDs] + """ # noqa: E501 + return self._org_ids + + @org_ids.setter + def org_ids(self, org_ids): + """Set the org_ids of this TemplateExportByID. + + :param org_ids: The org_ids of this TemplateExportByID. + :type: list[TemplateExportByIDOrgIDs] + """ # noqa: E501 + self._org_ids = org_ids + + @property + def resources(self): + """Get the resources of this TemplateExportByID. + + :return: The resources of this TemplateExportByID. + :rtype: list[TemplateExportByIDResources] + """ # noqa: E501 + return self._resources + + @resources.setter + def resources(self, resources): + """Set the resources of this TemplateExportByID. + + :param resources: The resources of this TemplateExportByID. + :type: list[TemplateExportByIDResources] + """ # noqa: E501 + self._resources = resources + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByID): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_org_ids.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_org_ids.py new file mode 100644 index 0000000..f29886e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_org_ids.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateExportByIDOrgIDs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
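+
+ Usage sketch (illustrative; the org ID below is a placeholder, and
+ resource_filters would be a TemplateExportByIDResourceFilters
+ instance, the filter model generated later in this diff):
+
+ from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs
+
+ org = TemplateExportByIDOrgIDs(org_id="0000000000000001")
+ org.to_dict() # {'org_id': '0000000000000001', 'resource_filters': None}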
+ """ + openapi_types = { + 'org_id': 'str', + 'resource_filters': 'TemplateExportByIDResourceFilters' + } + + attribute_map = { + 'org_id': 'orgID', + 'resource_filters': 'resourceFilters' + } + + def __init__(self, org_id=None, resource_filters=None): # noqa: E501,D401,D403 + """TemplateExportByIDOrgIDs - a model defined in OpenAPI.""" # noqa: E501 + self._org_id = None + self._resource_filters = None + self.discriminator = None + + if org_id is not None: + self.org_id = org_id + if resource_filters is not None: + self.resource_filters = resource_filters + + @property + def org_id(self): + """Get the org_id of this TemplateExportByIDOrgIDs. + + :return: The org_id of this TemplateExportByIDOrgIDs. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateExportByIDOrgIDs. + + :param org_id: The org_id of this TemplateExportByIDOrgIDs. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def resource_filters(self): + """Get the resource_filters of this TemplateExportByIDOrgIDs. + + :return: The resource_filters of this TemplateExportByIDOrgIDs. + :rtype: TemplateExportByIDResourceFilters + """ # noqa: E501 + return self._resource_filters + + @resource_filters.setter + def resource_filters(self, resource_filters): + """Set the resource_filters of this TemplateExportByIDOrgIDs. + + :param resource_filters: The resource_filters of this TemplateExportByIDOrgIDs. + :type: TemplateExportByIDResourceFilters + """ # noqa: E501 + self._resource_filters = resource_filters + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByIDOrgIDs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resource_filters.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resource_filters.py new file mode 100644 index 0000000..33cd9f7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resource_filters.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateExportByIDResourceFilters(object): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'by_label': 'list[str]', + 'by_resource_kind': 'list[TemplateKind]' + } + + attribute_map = { + 'by_label': 'byLabel', + 'by_resource_kind': 'byResourceKind' + } + + def __init__(self, by_label=None, by_resource_kind=None): # noqa: E501,D401,D403 + """TemplateExportByIDResourceFilters - a model defined in OpenAPI.""" # noqa: E501 + self._by_label = None + self._by_resource_kind = None + self.discriminator = None + + if by_label is not None: + self.by_label = by_label + if by_resource_kind is not None: + self.by_resource_kind = by_resource_kind + + @property + def by_label(self): + """Get the by_label of this TemplateExportByIDResourceFilters. + + :return: The by_label of this TemplateExportByIDResourceFilters. + :rtype: list[str] + """ # noqa: E501 + return self._by_label + + @by_label.setter + def by_label(self, by_label): + """Set the by_label of this TemplateExportByIDResourceFilters. + + :param by_label: The by_label of this TemplateExportByIDResourceFilters. + :type: list[str] + """ # noqa: E501 + self._by_label = by_label + + @property + def by_resource_kind(self): + """Get the by_resource_kind of this TemplateExportByIDResourceFilters. + + :return: The by_resource_kind of this TemplateExportByIDResourceFilters. + :rtype: list[TemplateKind] + """ # noqa: E501 + return self._by_resource_kind + + @by_resource_kind.setter + def by_resource_kind(self, by_resource_kind): + """Set the by_resource_kind of this TemplateExportByIDResourceFilters. + + :param by_resource_kind: The by_resource_kind of this TemplateExportByIDResourceFilters. + :type: list[TemplateKind] + """ # noqa: E501 + self._by_resource_kind = by_resource_kind + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByIDResourceFilters): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resources.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resources.py new file mode 100644 index 0000000..ddbc0b4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_id_resources.py @@ -0,0 +1,159 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+
+The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+import pprint
+import re # noqa: F401
+
+
+class TemplateExportByIDResources(object):
+ """NOTE: This class is auto generated by OpenAPI Generator.
+
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ """
+ Attributes:
+ openapi_types (dict): The key is attribute name
+ and the value is attribute type.
+ attribute_map (dict): The key is attribute name
+ and the value is json key in definition.
+ """
+ openapi_types = {
+ 'id': 'str',
+ 'kind': 'TemplateKind',
+ 'name': 'str'
+ }
+
+ attribute_map = {
+ 'id': 'id',
+ 'kind': 'kind',
+ 'name': 'name'
+ }
+
+ def __init__(self, id=None, kind=None, name=None): # noqa: E501,D401,D403
+ """TemplateExportByIDResources - a model defined in OpenAPI.""" # noqa: E501
+ self._id = None
+ self._kind = None
+ self._name = None
+ self.discriminator = None
+
+ self.id = id
+ self.kind = kind
+ if name is not None:
+ self.name = name
+
+ @property
+ def id(self):
+ """Get the id of this TemplateExportByIDResources.
+
+ :return: The id of this TemplateExportByIDResources.
+ :rtype: str
+ """ # noqa: E501
+ return self._id
+
+ @id.setter
+ def id(self, id):
+ """Set the id of this TemplateExportByIDResources.
+
+ :param id: The id of this TemplateExportByIDResources.
+ :type: str
+ """ # noqa: E501
+ if id is None:
+ raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501
+ self._id = id
+
+ @property
+ def kind(self):
+ """Get the kind of this TemplateExportByIDResources.
+
+ :return: The kind of this TemplateExportByIDResources.
+ :rtype: TemplateKind
+ """ # noqa: E501
+ return self._kind
+
+ @kind.setter
+ def kind(self, kind):
+ """Set the kind of this TemplateExportByIDResources.
+
+ :param kind: The kind of this TemplateExportByIDResources.
+ :type: TemplateKind
+ """ # noqa: E501
+ if kind is None:
+ raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501
+ self._kind = kind
+
+ @property
+ def name(self):
+ """Get the name of this TemplateExportByIDResources.
+
+ If defined with `id`, `name` is used for the resource exported by `id`. If defined independently, resources strictly matching `name` are exported.
+
+ :return: The name of this TemplateExportByIDResources.
+ :rtype: str
+ """ # noqa: E501
+ return self._name
+
+ @name.setter
+ def name(self, name):
+ """Set the name of this TemplateExportByIDResources.
+
+ If defined with `id`, `name` is used for the resource exported by `id`. If defined independently, resources strictly matching `name` are exported.
+
+ :param name: The name of this TemplateExportByIDResources.
+ :type: str + """ # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByIDResources): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name.py new file mode 100644 index 0000000..aa096de --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateExportByName(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'stack_id': 'str', + 'org_ids': 'list[TemplateExportByIDOrgIDs]', + 'resources': 'list[TemplateExportByNameResources]' + } + + attribute_map = { + 'stack_id': 'stackID', + 'org_ids': 'orgIDs', + 'resources': 'resources' + } + + def __init__(self, stack_id=None, org_ids=None, resources=None): # noqa: E501,D401,D403 + """TemplateExportByName - a model defined in OpenAPI.""" # noqa: E501 + self._stack_id = None + self._org_ids = None + self._resources = None + self.discriminator = None + + if stack_id is not None: + self.stack_id = stack_id + if org_ids is not None: + self.org_ids = org_ids + if resources is not None: + self.resources = resources + + @property + def stack_id(self): + """Get the stack_id of this TemplateExportByName. + + :return: The stack_id of this TemplateExportByName. + :rtype: str + """ # noqa: E501 + return self._stack_id + + @stack_id.setter + def stack_id(self, stack_id): + """Set the stack_id of this TemplateExportByName. + + :param stack_id: The stack_id of this TemplateExportByName. + :type: str + """ # noqa: E501 + self._stack_id = stack_id + + @property + def org_ids(self): + """Get the org_ids of this TemplateExportByName. + + :return: The org_ids of this TemplateExportByName. 
+ :rtype: list[TemplateExportByIDOrgIDs] + """ # noqa: E501 + return self._org_ids + + @org_ids.setter + def org_ids(self, org_ids): + """Set the org_ids of this TemplateExportByName. + + :param org_ids: The org_ids of this TemplateExportByName. + :type: list[TemplateExportByIDOrgIDs] + """ # noqa: E501 + self._org_ids = org_ids + + @property + def resources(self): + """Get the resources of this TemplateExportByName. + + :return: The resources of this TemplateExportByName. + :rtype: list[TemplateExportByNameResources] + """ # noqa: E501 + return self._resources + + @resources.setter + def resources(self, resources): + """Set the resources of this TemplateExportByName. + + :param resources: The resources of this TemplateExportByName. + :type: list[TemplateExportByNameResources] + """ # noqa: E501 + self._resources = resources + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByName): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name_resources.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name_resources.py new file mode 100644 index 0000000..35e955b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_export_by_name_resources.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateExportByNameResources(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'kind': 'TemplateKind', + 'name': 'str' + } + + attribute_map = { + 'kind': 'kind', + 'name': 'name' + } + + def __init__(self, kind=None, name=None): # noqa: E501,D401,D403 + """TemplateExportByNameResources - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._name = None + self.discriminator = None + + self.kind = kind + self.name = name + + @property + def kind(self): + """Get the kind of this TemplateExportByNameResources. + + :return: The kind of this TemplateExportByNameResources. 
+ :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateExportByNameResources. + + :param kind: The kind of this TemplateExportByNameResources. + :type: TemplateKind + """ # noqa: E501 + if kind is None: + raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501 + self._kind = kind + + @property + def name(self): + """Get the name of this TemplateExportByNameResources. + + :return: The name of this TemplateExportByNameResources. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateExportByNameResources. + + :param name: The name of this TemplateExportByNameResources. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateExportByNameResources): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_kind.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_kind.py new file mode 100644 index 0000000..256408a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_kind.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateKind(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + BUCKET = "Bucket" + CHECK = "Check" + CHECKDEADMAN = "CheckDeadman" + CHECKTHRESHOLD = "CheckThreshold" + DASHBOARD = "Dashboard" + LABEL = "Label" + NOTIFICATIONENDPOINT = "NotificationEndpoint" + NOTIFICATIONENDPOINTHTTP = "NotificationEndpointHTTP" + NOTIFICATIONENDPOINTPAGERDUTY = "NotificationEndpointPagerDuty" + NOTIFICATIONENDPOINTSLACK = "NotificationEndpointSlack" + NOTIFICATIONRULE = "NotificationRule" + TASK = "Task" + TELEGRAF = "Telegraf" + VARIABLE = "Variable" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
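+
+ Usage sketch (illustrative; the ID below is a placeholder): the
+ allowed kinds are plain string constants on the class, so callers
+ reference them directly rather than instantiating TemplateKind:
+
+ from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources
+ from influxdb_client.domain.template_kind import TemplateKind
+
+ assert TemplateKind.DASHBOARD == "Dashboard"
+ resource = TemplateExportByIDResources(id="0000000000000001", kind=TemplateKind.DASHBOARD)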
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """TemplateKind - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateKind): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary.py new file mode 100644 index 0000000..bdb2e20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary.py @@ -0,0 +1,199 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummary(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'sources': 'list[str]', + 'stack_id': 'str', + 'summary': 'TemplateSummarySummary', + 'diff': 'TemplateSummaryDiff', + 'errors': 'list[TemplateSummaryErrors]' + } + + attribute_map = { + 'sources': 'sources', + 'stack_id': 'stackID', + 'summary': 'summary', + 'diff': 'diff', + 'errors': 'errors' + } + + def __init__(self, sources=None, stack_id=None, summary=None, diff=None, errors=None): # noqa: E501,D401,D403 + """TemplateSummary - a model defined in OpenAPI.""" # noqa: E501 + self._sources = None + self._stack_id = None + self._summary = None + self._diff = None + self._errors = None + self.discriminator = None + + if sources is not None: + self.sources = sources + if stack_id is not None: + self.stack_id = stack_id + if summary is not None: + self.summary = summary + if diff is not None: + self.diff = diff + if errors is not None: + self.errors = errors + + @property + def sources(self): + """Get the sources of this TemplateSummary. + + :return: The sources of this TemplateSummary. + :rtype: list[str] + """ # noqa: E501 + return self._sources + + @sources.setter + def sources(self, sources): + """Set the sources of this TemplateSummary. + + :param sources: The sources of this TemplateSummary. 
+ :type: list[str] + """ # noqa: E501 + self._sources = sources + + @property + def stack_id(self): + """Get the stack_id of this TemplateSummary. + + :return: The stack_id of this TemplateSummary. + :rtype: str + """ # noqa: E501 + return self._stack_id + + @stack_id.setter + def stack_id(self, stack_id): + """Set the stack_id of this TemplateSummary. + + :param stack_id: The stack_id of this TemplateSummary. + :type: str + """ # noqa: E501 + self._stack_id = stack_id + + @property + def summary(self): + """Get the summary of this TemplateSummary. + + :return: The summary of this TemplateSummary. + :rtype: TemplateSummarySummary + """ # noqa: E501 + return self._summary + + @summary.setter + def summary(self, summary): + """Set the summary of this TemplateSummary. + + :param summary: The summary of this TemplateSummary. + :type: TemplateSummarySummary + """ # noqa: E501 + self._summary = summary + + @property + def diff(self): + """Get the diff of this TemplateSummary. + + :return: The diff of this TemplateSummary. + :rtype: TemplateSummaryDiff + """ # noqa: E501 + return self._diff + + @diff.setter + def diff(self, diff): + """Set the diff of this TemplateSummary. + + :param diff: The diff of this TemplateSummary. + :type: TemplateSummaryDiff + """ # noqa: E501 + self._diff = diff + + @property + def errors(self): + """Get the errors of this TemplateSummary. + + :return: The errors of this TemplateSummary. + :rtype: list[TemplateSummaryErrors] + """ # noqa: E501 + return self._errors + + @errors.setter + def errors(self, errors): + """Set the errors of this TemplateSummary. + + :param errors: The errors of this TemplateSummary. + :type: list[TemplateSummaryErrors] + """ # noqa: E501 + self._errors = errors + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummary): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff.py new file mode 100644 index 0000000..468fd4f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff.py @@ -0,0 +1,314 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiff(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'buckets': 'list[TemplateSummaryDiffBuckets]', + 'checks': 'list[TemplateSummaryDiffChecks]', + 'dashboards': 'list[TemplateSummaryDiffDashboards]', + 'labels': 'list[TemplateSummaryDiffLabels]', + 'label_mappings': 'list[TemplateSummaryDiffLabelMappings]', + 'notification_endpoints': 'list[TemplateSummaryDiffNotificationEndpoints]', + 'notification_rules': 'list[TemplateSummaryDiffNotificationRules]', + 'tasks': 'list[TemplateSummaryDiffTasks]', + 'telegraf_configs': 'list[TemplateSummaryDiffTelegrafConfigs]', + 'variables': 'list[TemplateSummaryDiffVariables]' + } + + attribute_map = { + 'buckets': 'buckets', + 'checks': 'checks', + 'dashboards': 'dashboards', + 'labels': 'labels', + 'label_mappings': 'labelMappings', + 'notification_endpoints': 'notificationEndpoints', + 'notification_rules': 'notificationRules', + 'tasks': 'tasks', + 'telegraf_configs': 'telegrafConfigs', + 'variables': 'variables' + } + + def __init__(self, buckets=None, checks=None, dashboards=None, labels=None, label_mappings=None, notification_endpoints=None, notification_rules=None, tasks=None, telegraf_configs=None, variables=None): # noqa: E501,D401,D403 + """TemplateSummaryDiff - a model defined in OpenAPI.""" # noqa: E501 + self._buckets = None + self._checks = None + self._dashboards = None + self._labels = None + self._label_mappings = None + self._notification_endpoints = None + self._notification_rules = None + self._tasks = None + self._telegraf_configs = None + self._variables = None + self.discriminator = None + + if buckets is not None: + self.buckets = buckets + if checks is not None: + self.checks = checks + if dashboards is not None: + self.dashboards = dashboards + if labels is not None: + self.labels = labels + if label_mappings is not None: + self.label_mappings = label_mappings + if notification_endpoints is not None: + self.notification_endpoints = notification_endpoints + if notification_rules is not None: + self.notification_rules = notification_rules + if tasks is not None: + self.tasks = tasks + if telegraf_configs is not None: + self.telegraf_configs = telegraf_configs + if variables is not None: + self.variables = variables + + @property + def buckets(self): + """Get the buckets of this TemplateSummaryDiff. + + :return: The buckets of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffBuckets] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this TemplateSummaryDiff. + + :param buckets: The buckets of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffBuckets] + """ # noqa: E501 + self._buckets = buckets + + @property + def checks(self): + """Get the checks of this TemplateSummaryDiff. + + :return: The checks of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffChecks] + """ # noqa: E501 + return self._checks + + @checks.setter + def checks(self, checks): + """Set the checks of this TemplateSummaryDiff. 
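+
+ A sketch of typical construction (illustrative; each diff list
+ holds one generated entry per affected resource, shown empty here):
+
+ diff = TemplateSummaryDiff(buckets=[], checks=[])
+ diff.to_dict()['checks'] # []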
+ + :param checks: The checks of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffChecks] + """ # noqa: E501 + self._checks = checks + + @property + def dashboards(self): + """Get the dashboards of this TemplateSummaryDiff. + + :return: The dashboards of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffDashboards] + """ # noqa: E501 + return self._dashboards + + @dashboards.setter + def dashboards(self, dashboards): + """Set the dashboards of this TemplateSummaryDiff. + + :param dashboards: The dashboards of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffDashboards] + """ # noqa: E501 + self._dashboards = dashboards + + @property + def labels(self): + """Get the labels of this TemplateSummaryDiff. + + :return: The labels of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffLabels] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this TemplateSummaryDiff. + + :param labels: The labels of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffLabels] + """ # noqa: E501 + self._labels = labels + + @property + def label_mappings(self): + """Get the label_mappings of this TemplateSummaryDiff. + + :return: The label_mappings of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffLabelMappings] + """ # noqa: E501 + return self._label_mappings + + @label_mappings.setter + def label_mappings(self, label_mappings): + """Set the label_mappings of this TemplateSummaryDiff. + + :param label_mappings: The label_mappings of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffLabelMappings] + """ # noqa: E501 + self._label_mappings = label_mappings + + @property + def notification_endpoints(self): + """Get the notification_endpoints of this TemplateSummaryDiff. + + :return: The notification_endpoints of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffNotificationEndpoints] + """ # noqa: E501 + return self._notification_endpoints + + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints): + """Set the notification_endpoints of this TemplateSummaryDiff. + + :param notification_endpoints: The notification_endpoints of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffNotificationEndpoints] + """ # noqa: E501 + self._notification_endpoints = notification_endpoints + + @property + def notification_rules(self): + """Get the notification_rules of this TemplateSummaryDiff. + + :return: The notification_rules of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffNotificationRules] + """ # noqa: E501 + return self._notification_rules + + @notification_rules.setter + def notification_rules(self, notification_rules): + """Set the notification_rules of this TemplateSummaryDiff. + + :param notification_rules: The notification_rules of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffNotificationRules] + """ # noqa: E501 + self._notification_rules = notification_rules + + @property + def tasks(self): + """Get the tasks of this TemplateSummaryDiff. + + :return: The tasks of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffTasks] + """ # noqa: E501 + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Set the tasks of this TemplateSummaryDiff. + + :param tasks: The tasks of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffTasks] + """ # noqa: E501 + self._tasks = tasks + + @property + def telegraf_configs(self): + """Get the telegraf_configs of this TemplateSummaryDiff. 
+ + :return: The telegraf_configs of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffTelegrafConfigs] + """ # noqa: E501 + return self._telegraf_configs + + @telegraf_configs.setter + def telegraf_configs(self, telegraf_configs): + """Set the telegraf_configs of this TemplateSummaryDiff. + + :param telegraf_configs: The telegraf_configs of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffTelegrafConfigs] + """ # noqa: E501 + self._telegraf_configs = telegraf_configs + + @property + def variables(self): + """Get the variables of this TemplateSummaryDiff. + + :return: The variables of this TemplateSummaryDiff. + :rtype: list[TemplateSummaryDiffVariables] + """ # noqa: E501 + return self._variables + + @variables.setter + def variables(self, variables): + """Set the variables of this TemplateSummaryDiff. + + :param variables: The variables of this TemplateSummaryDiff. + :type: list[TemplateSummaryDiffVariables] + """ # noqa: E501 + self._variables = variables + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiff): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets.py new file mode 100644 index 0000000..48e1c04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffBuckets(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
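+
+ Usage sketch (illustrative; the state_status string and ID are
+ placeholder values of the kind the server reports):
+
+ from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets
+
+ entry = TemplateSummaryDiffBuckets(state_status="new", id="0000000000000001")
+ entry.to_dict()['state_status'] # 'new'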
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffBucketsNewOld', + 'old': 'TemplateSummaryDiffBucketsNewOld' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffBuckets - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffBuckets. + + :return: The kind of this TemplateSummaryDiffBuckets. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffBuckets. + + :param kind: The kind of this TemplateSummaryDiffBuckets. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffBuckets. + + :return: The state_status of this TemplateSummaryDiffBuckets. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffBuckets. + + :param state_status: The state_status of this TemplateSummaryDiffBuckets. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffBuckets. + + :return: The id of this TemplateSummaryDiffBuckets. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffBuckets. + + :param id: The id of this TemplateSummaryDiffBuckets. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffBuckets. + + :return: The template_meta_name of this TemplateSummaryDiffBuckets. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffBuckets. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffBuckets. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffBuckets. + + :return: The new of this TemplateSummaryDiffBuckets. + :rtype: TemplateSummaryDiffBucketsNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffBuckets. + + :param new: The new of this TemplateSummaryDiffBuckets. + :type: TemplateSummaryDiffBucketsNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffBuckets. + + :return: The old of this TemplateSummaryDiffBuckets. 
+ :rtype: TemplateSummaryDiffBucketsNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffBuckets. + + :param old: The old of this TemplateSummaryDiffBuckets. + :type: TemplateSummaryDiffBucketsNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffBuckets): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets_new_old.py new file mode 100644 index 0000000..7caf279 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_buckets_new_old.py @@ -0,0 +1,157 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffBucketsNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'retention_rules': 'list[BucketRetentionRules]' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'retention_rules': 'retentionRules' + } + + def __init__(self, name=None, description=None, retention_rules=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffBucketsNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._retention_rules = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if retention_rules is not None: + self.retention_rules = retention_rules + + @property + def name(self): + """Get the name of this TemplateSummaryDiffBucketsNewOld. + + :return: The name of this TemplateSummaryDiffBucketsNewOld. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffBucketsNewOld. 
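+
+ A sketch of building the new/old snapshot (illustrative; the name
+ and one-day retention value are arbitrary, and BucketRetentionRules
+ is the generated retention model referenced by retention_rules):
+
+ from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules
+ from influxdb_client.domain.template_summary_diff_buckets_new_old import TemplateSummaryDiffBucketsNewOld
+
+ old = TemplateSummaryDiffBucketsNewOld(
+ name="telegraf",
+ retention_rules=[BucketRetentionRules(type="expire", every_seconds=86400)]
+ )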
+ + :param name: The name of this TemplateSummaryDiffBucketsNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummaryDiffBucketsNewOld. + + :return: The description of this TemplateSummaryDiffBucketsNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffBucketsNewOld. + + :param description: The description of this TemplateSummaryDiffBucketsNewOld. + :type: str + """ # noqa: E501 + self._description = description + + @property + def retention_rules(self): + """Get the retention_rules of this TemplateSummaryDiffBucketsNewOld. + + Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required. + + :return: The retention_rules of this TemplateSummaryDiffBucketsNewOld. + :rtype: list[BucketRetentionRules] + """ # noqa: E501 + return self._retention_rules + + @retention_rules.setter + def retention_rules(self, retention_rules): + """Set the retention_rules of this TemplateSummaryDiffBucketsNewOld. + + Retention rules to expire or retain data. The InfluxDB `/api/v2` API uses `RetentionRules` to configure the [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period). #### InfluxDB Cloud - `retentionRules` is required. #### InfluxDB OSS - `retentionRules` isn't required. + + :param retention_rules: The retention_rules of this TemplateSummaryDiffBucketsNewOld. + :type: list[BucketRetentionRules] + """ # noqa: E501 + self._retention_rules = retention_rules + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffBucketsNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_checks.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_checks.py new file mode 100644 index 0000000..3617fa3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_checks.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffChecks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'CheckDiscriminator', + 'old': 'CheckDiscriminator' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffChecks - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffChecks. + + :return: The kind of this TemplateSummaryDiffChecks. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffChecks. + + :param kind: The kind of this TemplateSummaryDiffChecks. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffChecks. + + :return: The state_status of this TemplateSummaryDiffChecks. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffChecks. + + :param state_status: The state_status of this TemplateSummaryDiffChecks. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffChecks. + + :return: The id of this TemplateSummaryDiffChecks. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffChecks. + + :param id: The id of this TemplateSummaryDiffChecks. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffChecks. + + :return: The template_meta_name of this TemplateSummaryDiffChecks. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffChecks. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffChecks. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffChecks. 
+ + :return: The new of this TemplateSummaryDiffChecks. + :rtype: CheckDiscriminator + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffChecks. + + :param new: The new of this TemplateSummaryDiffChecks. + :type: CheckDiscriminator + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffChecks. + + :return: The old of this TemplateSummaryDiffChecks. + :rtype: CheckDiscriminator + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffChecks. + + :param old: The old of this TemplateSummaryDiffChecks. + :type: CheckDiscriminator + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffChecks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards.py new file mode 100644 index 0000000..748b2a0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffDashboards(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
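Each of these `TemplateSummaryDiff*` entries pairs an `old` and a `new` snapshot with a `state_status`, which is how a template dry run reports what applying the template would change. A hedged sketch of interpreting one check entry using only the properties defined above (the create/update/remove reading is an assumption about the semantics, not something the generated code spells out):

    def describe_check_change(diff_check):
        """Summarize one TemplateSummaryDiffChecks entry in one line."""
        if diff_check.old is None and diff_check.new is not None:
            return f"create check '{diff_check.template_meta_name}'"
        if diff_check.new is None and diff_check.old is not None:
            return f"remove check {diff_check.id}"
        return f"update check {diff_check.id} (state: {diff_check.state_status})"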
+ """ + openapi_types = { + 'state_status': 'str', + 'id': 'str', + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffDashboardsNewOld', + 'old': 'TemplateSummaryDiffDashboardsNewOld' + } + + attribute_map = { + 'state_status': 'stateStatus', + 'id': 'id', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, state_status=None, id=None, kind=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffDashboards - a model defined in OpenAPI.""" # noqa: E501 + self._state_status = None + self._id = None + self._kind = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffDashboards. + + :return: The state_status of this TemplateSummaryDiffDashboards. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffDashboards. + + :param state_status: The state_status of this TemplateSummaryDiffDashboards. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffDashboards. + + :return: The id of this TemplateSummaryDiffDashboards. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffDashboards. + + :param id: The id of this TemplateSummaryDiffDashboards. + :type: str + """ # noqa: E501 + self._id = id + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffDashboards. + + :return: The kind of this TemplateSummaryDiffDashboards. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffDashboards. + + :param kind: The kind of this TemplateSummaryDiffDashboards. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffDashboards. + + :return: The template_meta_name of this TemplateSummaryDiffDashboards. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffDashboards. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffDashboards. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffDashboards. + + :return: The new of this TemplateSummaryDiffDashboards. + :rtype: TemplateSummaryDiffDashboardsNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffDashboards. + + :param new: The new of this TemplateSummaryDiffDashboards. 
+ :type: TemplateSummaryDiffDashboardsNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffDashboards. + + :return: The old of this TemplateSummaryDiffDashboards. + :rtype: TemplateSummaryDiffDashboardsNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffDashboards. + + :param old: The old of this TemplateSummaryDiffDashboards. + :type: TemplateSummaryDiffDashboardsNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffDashboards): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards_new_old.py new file mode 100644 index 0000000..a2b727d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_dashboards_new_old.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffDashboardsNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'charts': 'list[TemplateChart]' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'charts': 'charts' + } + + def __init__(self, name=None, description=None, charts=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffDashboardsNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._charts = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if charts is not None: + self.charts = charts + + @property + def name(self): + """Get the name of this TemplateSummaryDiffDashboardsNewOld. + + :return: The name of this TemplateSummaryDiffDashboardsNewOld. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffDashboardsNewOld. + + :param name: The name of this TemplateSummaryDiffDashboardsNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummaryDiffDashboardsNewOld. + + :return: The description of this TemplateSummaryDiffDashboardsNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffDashboardsNewOld. + + :param description: The description of this TemplateSummaryDiffDashboardsNewOld. + :type: str + """ # noqa: E501 + self._description = description + + @property + def charts(self): + """Get the charts of this TemplateSummaryDiffDashboardsNewOld. + + :return: The charts of this TemplateSummaryDiffDashboardsNewOld. + :rtype: list[TemplateChart] + """ # noqa: E501 + return self._charts + + @charts.setter + def charts(self, charts): + """Set the charts of this TemplateSummaryDiffDashboardsNewOld. + + :param charts: The charts of this TemplateSummaryDiffDashboardsNewOld. + :type: list[TemplateChart] + """ # noqa: E501 + self._charts = charts + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffDashboardsNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_label_mappings.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_label_mappings.py new file mode 100644 index 0000000..2abb3dd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_label_mappings.py @@ -0,0 +1,268 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffLabelMappings(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
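The paired `openapi_types` / `attribute_map` class tables (repeated in the label-mapping model below) drive (de)serialization: `openapi_types` records each attribute's Python type, while `attribute_map` renames snake_case attributes to the camelCase keys used on the wire (`resource_id` -> `resourceID`, `label_id` -> `labelID`, and so on). A minimal sketch of that renaming step, applicable to any of these generated models; this mirrors what the client's serializer does internally and is not a public API:

    def to_json_keys(model):
        """Project a generated model onto its JSON (camelCase) key names."""
        return {
            json_key: getattr(model, attr)
            for attr, json_key in model.attribute_map.items()
            if getattr(model, attr) is not None  # skip unset attributes
        }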
+ """ + openapi_types = { + 'status': 'str', + 'resource_type': 'str', + 'resource_id': 'str', + 'resource_template_meta_name': 'str', + 'resource_name': 'str', + 'label_id': 'str', + 'label_template_meta_name': 'str', + 'label_name': 'str' + } + + attribute_map = { + 'status': 'status', + 'resource_type': 'resourceType', + 'resource_id': 'resourceID', + 'resource_template_meta_name': 'resourceTemplateMetaName', + 'resource_name': 'resourceName', + 'label_id': 'labelID', + 'label_template_meta_name': 'labelTemplateMetaName', + 'label_name': 'labelName' + } + + def __init__(self, status=None, resource_type=None, resource_id=None, resource_template_meta_name=None, resource_name=None, label_id=None, label_template_meta_name=None, label_name=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffLabelMappings - a model defined in OpenAPI.""" # noqa: E501 + self._status = None + self._resource_type = None + self._resource_id = None + self._resource_template_meta_name = None + self._resource_name = None + self._label_id = None + self._label_template_meta_name = None + self._label_name = None + self.discriminator = None + + if status is not None: + self.status = status + if resource_type is not None: + self.resource_type = resource_type + if resource_id is not None: + self.resource_id = resource_id + if resource_template_meta_name is not None: + self.resource_template_meta_name = resource_template_meta_name + if resource_name is not None: + self.resource_name = resource_name + if label_id is not None: + self.label_id = label_id + if label_template_meta_name is not None: + self.label_template_meta_name = label_template_meta_name + if label_name is not None: + self.label_name = label_name + + @property + def status(self): + """Get the status of this TemplateSummaryDiffLabelMappings. + + :return: The status of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummaryDiffLabelMappings. + + :param status: The status of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._status = status + + @property + def resource_type(self): + """Get the resource_type of this TemplateSummaryDiffLabelMappings. + + :return: The resource_type of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_type + + @resource_type.setter + def resource_type(self, resource_type): + """Set the resource_type of this TemplateSummaryDiffLabelMappings. + + :param resource_type: The resource_type of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._resource_type = resource_type + + @property + def resource_id(self): + """Get the resource_id of this TemplateSummaryDiffLabelMappings. + + :return: The resource_id of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_id + + @resource_id.setter + def resource_id(self, resource_id): + """Set the resource_id of this TemplateSummaryDiffLabelMappings. + + :param resource_id: The resource_id of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._resource_id = resource_id + + @property + def resource_template_meta_name(self): + """Get the resource_template_meta_name of this TemplateSummaryDiffLabelMappings. + + :return: The resource_template_meta_name of this TemplateSummaryDiffLabelMappings. 
+ :rtype: str + """ # noqa: E501 + return self._resource_template_meta_name + + @resource_template_meta_name.setter + def resource_template_meta_name(self, resource_template_meta_name): + """Set the resource_template_meta_name of this TemplateSummaryDiffLabelMappings. + + :param resource_template_meta_name: The resource_template_meta_name of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._resource_template_meta_name = resource_template_meta_name + + @property + def resource_name(self): + """Get the resource_name of this TemplateSummaryDiffLabelMappings. + + :return: The resource_name of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_name + + @resource_name.setter + def resource_name(self, resource_name): + """Set the resource_name of this TemplateSummaryDiffLabelMappings. + + :param resource_name: The resource_name of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._resource_name = resource_name + + @property + def label_id(self): + """Get the label_id of this TemplateSummaryDiffLabelMappings. + + :return: The label_id of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_id + + @label_id.setter + def label_id(self, label_id): + """Set the label_id of this TemplateSummaryDiffLabelMappings. + + :param label_id: The label_id of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._label_id = label_id + + @property + def label_template_meta_name(self): + """Get the label_template_meta_name of this TemplateSummaryDiffLabelMappings. + + :return: The label_template_meta_name of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_template_meta_name + + @label_template_meta_name.setter + def label_template_meta_name(self, label_template_meta_name): + """Set the label_template_meta_name of this TemplateSummaryDiffLabelMappings. + + :param label_template_meta_name: The label_template_meta_name of this TemplateSummaryDiffLabelMappings. + :type: str + """ # noqa: E501 + self._label_template_meta_name = label_template_meta_name + + @property + def label_name(self): + """Get the label_name of this TemplateSummaryDiffLabelMappings. + + :return: The label_name of this TemplateSummaryDiffLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_name + + @label_name.setter + def label_name(self, label_name): + """Set the label_name of this TemplateSummaryDiffLabelMappings. + + :param label_name: The label_name of this TemplateSummaryDiffLabelMappings. 
+ :type: str + """ # noqa: E501 + self._label_name = label_name + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffLabelMappings): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels.py new file mode 100644 index 0000000..ab6b9c0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffLabels(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'state_status': 'str', + 'kind': 'TemplateKind', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffLabelsNewOld', + 'old': 'TemplateSummaryDiffLabelsNewOld' + } + + attribute_map = { + 'state_status': 'stateStatus', + 'kind': 'kind', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, state_status=None, kind=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffLabels - a model defined in OpenAPI.""" # noqa: E501 + self._state_status = None + self._kind = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if state_status is not None: + self.state_status = state_status + if kind is not None: + self.kind = kind + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffLabels. + + :return: The state_status of this TemplateSummaryDiffLabels. 
+ :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffLabels. + + :param state_status: The state_status of this TemplateSummaryDiffLabels. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffLabels. + + :return: The kind of this TemplateSummaryDiffLabels. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffLabels. + + :param kind: The kind of this TemplateSummaryDiffLabels. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def id(self): + """Get the id of this TemplateSummaryDiffLabels. + + :return: The id of this TemplateSummaryDiffLabels. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffLabels. + + :param id: The id of this TemplateSummaryDiffLabels. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffLabels. + + :return: The template_meta_name of this TemplateSummaryDiffLabels. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffLabels. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffLabels. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffLabels. + + :return: The new of this TemplateSummaryDiffLabels. + :rtype: TemplateSummaryDiffLabelsNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffLabels. + + :param new: The new of this TemplateSummaryDiffLabels. + :type: TemplateSummaryDiffLabelsNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffLabels. + + :return: The old of this TemplateSummaryDiffLabels. + :rtype: TemplateSummaryDiffLabelsNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffLabels. + + :param old: The old of this TemplateSummaryDiffLabels. 
+ :type: TemplateSummaryDiffLabelsNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffLabels): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels_new_old.py new file mode 100644 index 0000000..27e7bdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_labels_new_old.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffLabelsNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'color': 'str', + 'description': 'str' + } + + attribute_map = { + 'name': 'name', + 'color': 'color', + 'description': 'description' + } + + def __init__(self, name=None, color=None, description=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffLabelsNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._color = None + self._description = None + self.discriminator = None + + if name is not None: + self.name = name + if color is not None: + self.color = color + if description is not None: + self.description = description + + @property + def name(self): + """Get the name of this TemplateSummaryDiffLabelsNewOld. + + :return: The name of this TemplateSummaryDiffLabelsNewOld. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffLabelsNewOld. + + :param name: The name of this TemplateSummaryDiffLabelsNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def color(self): + """Get the color of this TemplateSummaryDiffLabelsNewOld. + + :return: The color of this TemplateSummaryDiffLabelsNewOld. 
+ :rtype: str + """ # noqa: E501 + return self._color + + @color.setter + def color(self, color): + """Set the color of this TemplateSummaryDiffLabelsNewOld. + + :param color: The color of this TemplateSummaryDiffLabelsNewOld. + :type: str + """ # noqa: E501 + self._color = color + + @property + def description(self): + """Get the description of this TemplateSummaryDiffLabelsNewOld. + + :return: The description of this TemplateSummaryDiffLabelsNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffLabelsNewOld. + + :param description: The description of this TemplateSummaryDiffLabelsNewOld. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffLabelsNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_endpoints.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_endpoints.py new file mode 100644 index 0000000..47dc4ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_endpoints.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffNotificationEndpoints(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
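All of these models implement equality the same way: `__eq__` first requires the exact model class and then compares `__dict__`, so an attribute left unset compares equal to one explicitly passed as `None`, because the constructor pre-seeds every private slot with `None`. A small sketch against the label model defined just above (import path mirrors this diff):

    from influxdb_client.domain.template_summary_diff_labels_new_old import (
        TemplateSummaryDiffLabelsNewOld,
    )

    a = TemplateSummaryDiffLabelsNewOld(name="env")
    b = TemplateSummaryDiffLabelsNewOld(name="env", color=None)

    assert a == b                     # both end up with _color = None, so __dict__ matches
    assert (a == object()) is False   # non-model values are never equal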
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'NotificationEndpointDiscriminator', + 'old': 'NotificationEndpointDiscriminator' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffNotificationEndpoints - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffNotificationEndpoints. + + :return: The kind of this TemplateSummaryDiffNotificationEndpoints. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffNotificationEndpoints. + + :param kind: The kind of this TemplateSummaryDiffNotificationEndpoints. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffNotificationEndpoints. + + :return: The state_status of this TemplateSummaryDiffNotificationEndpoints. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffNotificationEndpoints. + + :param state_status: The state_status of this TemplateSummaryDiffNotificationEndpoints. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffNotificationEndpoints. + + :return: The id of this TemplateSummaryDiffNotificationEndpoints. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffNotificationEndpoints. + + :param id: The id of this TemplateSummaryDiffNotificationEndpoints. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffNotificationEndpoints. + + :return: The template_meta_name of this TemplateSummaryDiffNotificationEndpoints. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffNotificationEndpoints. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffNotificationEndpoints. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffNotificationEndpoints. + + :return: The new of this TemplateSummaryDiffNotificationEndpoints. + :rtype: NotificationEndpointDiscriminator + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffNotificationEndpoints. 
+ + :param new: The new of this TemplateSummaryDiffNotificationEndpoints. + :type: NotificationEndpointDiscriminator + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffNotificationEndpoints. + + :return: The old of this TemplateSummaryDiffNotificationEndpoints. + :rtype: NotificationEndpointDiscriminator + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffNotificationEndpoints. + + :param old: The old of this TemplateSummaryDiffNotificationEndpoints. + :type: NotificationEndpointDiscriminator + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffNotificationEndpoints): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules.py new file mode 100644 index 0000000..cc4ab1e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffNotificationRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffNotificationRulesNewOld', + 'old': 'TemplateSummaryDiffNotificationRulesNewOld' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffNotificationRules - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffNotificationRules. + + :return: The kind of this TemplateSummaryDiffNotificationRules. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffNotificationRules. + + :param kind: The kind of this TemplateSummaryDiffNotificationRules. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffNotificationRules. + + :return: The state_status of this TemplateSummaryDiffNotificationRules. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffNotificationRules. + + :param state_status: The state_status of this TemplateSummaryDiffNotificationRules. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffNotificationRules. + + :return: The id of this TemplateSummaryDiffNotificationRules. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffNotificationRules. + + :param id: The id of this TemplateSummaryDiffNotificationRules. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffNotificationRules. + + :return: The template_meta_name of this TemplateSummaryDiffNotificationRules. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffNotificationRules. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffNotificationRules. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffNotificationRules. + + :return: The new of this TemplateSummaryDiffNotificationRules. + :rtype: TemplateSummaryDiffNotificationRulesNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffNotificationRules. 
+ + :param new: The new of this TemplateSummaryDiffNotificationRules. + :type: TemplateSummaryDiffNotificationRulesNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffNotificationRules. + + :return: The old of this TemplateSummaryDiffNotificationRules. + :rtype: TemplateSummaryDiffNotificationRulesNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffNotificationRules. + + :param old: The old of this TemplateSummaryDiffNotificationRules. + :type: TemplateSummaryDiffNotificationRulesNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffNotificationRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules_new_old.py new file mode 100644 index 0000000..b769118 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_notification_rules_new_old.py @@ -0,0 +1,337 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffNotificationRulesNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
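`to_str` and `__repr__` both delegate to `pprint.pformat(self.to_dict())`, so printing any of these models shows the serialized dict rather than a default object address. A usage sketch for the notification-rule snapshot model defined below (field values invented; note `every` and `offset` are duration literals typed as plain strings here):

    from influxdb_client.domain.template_summary_diff_notification_rules_new_old import (
        TemplateSummaryDiffNotificationRulesNewOld,
    )

    rule = TemplateSummaryDiffNotificationRulesNewOld(
        name="cpu-alert",
        every="1h",
        message_template="CPU check status changed",
    )

    print(rule)  # __repr__ -> to_str() -> pprint.pformat(rule.to_dict())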
+ """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'endpoint_name': 'str', + 'endpoint_id': 'str', + 'endpoint_type': 'str', + 'every': 'str', + 'offset': 'str', + 'message_template': 'str', + 'status': 'str', + 'status_rules': 'list[TemplateSummarySummaryStatusRules]', + 'tag_rules': 'list[TemplateSummarySummaryTagRules]' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'endpoint_name': 'endpointName', + 'endpoint_id': 'endpointID', + 'endpoint_type': 'endpointType', + 'every': 'every', + 'offset': 'offset', + 'message_template': 'messageTemplate', + 'status': 'status', + 'status_rules': 'statusRules', + 'tag_rules': 'tagRules' + } + + def __init__(self, name=None, description=None, endpoint_name=None, endpoint_id=None, endpoint_type=None, every=None, offset=None, message_template=None, status=None, status_rules=None, tag_rules=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffNotificationRulesNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._endpoint_name = None + self._endpoint_id = None + self._endpoint_type = None + self._every = None + self._offset = None + self._message_template = None + self._status = None + self._status_rules = None + self._tag_rules = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if endpoint_name is not None: + self.endpoint_name = endpoint_name + if endpoint_id is not None: + self.endpoint_id = endpoint_id + if endpoint_type is not None: + self.endpoint_type = endpoint_type + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if message_template is not None: + self.message_template = message_template + if status is not None: + self.status = status + if status_rules is not None: + self.status_rules = status_rules + if tag_rules is not None: + self.tag_rules = tag_rules + + @property + def name(self): + """Get the name of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The name of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffNotificationRulesNewOld. + + :param name: The name of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The description of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffNotificationRulesNewOld. + + :param description: The description of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._description = description + + @property + def endpoint_name(self): + """Get the endpoint_name of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The endpoint_name of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._endpoint_name + + @endpoint_name.setter + def endpoint_name(self, endpoint_name): + """Set the endpoint_name of this TemplateSummaryDiffNotificationRulesNewOld. + + :param endpoint_name: The endpoint_name of this TemplateSummaryDiffNotificationRulesNewOld. 
+ :type: str + """ # noqa: E501 + self._endpoint_name = endpoint_name + + @property + def endpoint_id(self): + """Get the endpoint_id of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The endpoint_id of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._endpoint_id + + @endpoint_id.setter + def endpoint_id(self, endpoint_id): + """Set the endpoint_id of this TemplateSummaryDiffNotificationRulesNewOld. + + :param endpoint_id: The endpoint_id of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._endpoint_id = endpoint_id + + @property + def endpoint_type(self): + """Get the endpoint_type of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The endpoint_type of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._endpoint_type + + @endpoint_type.setter + def endpoint_type(self, endpoint_type): + """Set the endpoint_type of this TemplateSummaryDiffNotificationRulesNewOld. + + :param endpoint_type: The endpoint_type of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._endpoint_type = endpoint_type + + @property + def every(self): + """Get the every of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The every of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this TemplateSummaryDiffNotificationRulesNewOld. + + :param every: The every of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The offset of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this TemplateSummaryDiffNotificationRulesNewOld. + + :param offset: The offset of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def message_template(self): + """Get the message_template of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The message_template of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._message_template + + @message_template.setter + def message_template(self, message_template): + """Set the message_template of this TemplateSummaryDiffNotificationRulesNewOld. + + :param message_template: The message_template of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._message_template = message_template + + @property + def status(self): + """Get the status of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The status of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummaryDiffNotificationRulesNewOld. + + :param status: The status of this TemplateSummaryDiffNotificationRulesNewOld. + :type: str + """ # noqa: E501 + self._status = status + + @property + def status_rules(self): + """Get the status_rules of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The status_rules of this TemplateSummaryDiffNotificationRulesNewOld. 
+ :rtype: list[TemplateSummarySummaryStatusRules] + """ # noqa: E501 + return self._status_rules + + @status_rules.setter + def status_rules(self, status_rules): + """Set the status_rules of this TemplateSummaryDiffNotificationRulesNewOld. + + :param status_rules: The status_rules of this TemplateSummaryDiffNotificationRulesNewOld. + :type: list[TemplateSummarySummaryStatusRules] + """ # noqa: E501 + self._status_rules = status_rules + + @property + def tag_rules(self): + """Get the tag_rules of this TemplateSummaryDiffNotificationRulesNewOld. + + :return: The tag_rules of this TemplateSummaryDiffNotificationRulesNewOld. + :rtype: list[TemplateSummarySummaryTagRules] + """ # noqa: E501 + return self._tag_rules + + @tag_rules.setter + def tag_rules(self, tag_rules): + """Set the tag_rules of this TemplateSummaryDiffNotificationRulesNewOld. + + :param tag_rules: The tag_rules of this TemplateSummaryDiffNotificationRulesNewOld. + :type: list[TemplateSummarySummaryTagRules] + """ # noqa: E501 + self._tag_rules = tag_rules + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffNotificationRulesNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks.py new file mode 100644 index 0000000..8dca893 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffTasks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
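Because every model carries its own `openapi_types` table, the `new`/`old` pairs in these diff entries can also be compared field by field without hard-coding attribute names. A generic sketch, assuming `old` and `new` are two non-None snapshots of the same model class (for example the `TemplateSummaryDiffTasksNewOld` defined below):

    def changed_fields(old, new):
        """List attribute names whose values differ between two snapshots."""
        return [
            attr for attr in new.openapi_types
            if getattr(old, attr) != getattr(new, attr)
        ]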
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffTasksNewOld', + 'old': 'TemplateSummaryDiffTasksNewOld' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffTasks - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffTasks. + + :return: The kind of this TemplateSummaryDiffTasks. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffTasks. + + :param kind: The kind of this TemplateSummaryDiffTasks. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffTasks. + + :return: The state_status of this TemplateSummaryDiffTasks. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffTasks. + + :param state_status: The state_status of this TemplateSummaryDiffTasks. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffTasks. + + :return: The id of this TemplateSummaryDiffTasks. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffTasks. + + :param id: The id of this TemplateSummaryDiffTasks. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffTasks. + + :return: The template_meta_name of this TemplateSummaryDiffTasks. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffTasks. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffTasks. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffTasks. + + :return: The new of this TemplateSummaryDiffTasks. + :rtype: TemplateSummaryDiffTasksNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffTasks. + + :param new: The new of this TemplateSummaryDiffTasks. + :type: TemplateSummaryDiffTasksNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffTasks. + + :return: The old of this TemplateSummaryDiffTasks. 
+ :rtype: TemplateSummaryDiffTasksNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffTasks. + + :param old: The old of this TemplateSummaryDiffTasks. + :type: TemplateSummaryDiffTasksNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffTasks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks_new_old.py new file mode 100644 index 0000000..57a83e8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_tasks_new_old.py @@ -0,0 +1,245 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffTasksNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'name': 'str', + 'cron': 'str', + 'description': 'str', + 'every': 'str', + 'offset': 'str', + 'query': 'str', + 'status': 'str' + } + + attribute_map = { + 'name': 'name', + 'cron': 'cron', + 'description': 'description', + 'every': 'every', + 'offset': 'offset', + 'query': 'query', + 'status': 'status' + } + + def __init__(self, name=None, cron=None, description=None, every=None, offset=None, query=None, status=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffTasksNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._cron = None + self._description = None + self._every = None + self._offset = None + self._query = None + self._status = None + self.discriminator = None + + if name is not None: + self.name = name + if cron is not None: + self.cron = cron + if description is not None: + self.description = description + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if query is not None: + self.query = query + if status is not None: + self.status = status + + @property + def name(self): + """Get the name of this TemplateSummaryDiffTasksNewOld. + + :return: The name of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffTasksNewOld. + + :param name: The name of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def cron(self): + """Get the cron of this TemplateSummaryDiffTasksNewOld. + + :return: The cron of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._cron + + @cron.setter + def cron(self, cron): + """Set the cron of this TemplateSummaryDiffTasksNewOld. + + :param cron: The cron of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._cron = cron + + @property + def description(self): + """Get the description of this TemplateSummaryDiffTasksNewOld. + + :return: The description of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffTasksNewOld. + + :param description: The description of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._description = description + + @property + def every(self): + """Get the every of this TemplateSummaryDiffTasksNewOld. + + :return: The every of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this TemplateSummaryDiffTasksNewOld. + + :param every: The every of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this TemplateSummaryDiffTasksNewOld. + + :return: The offset of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this TemplateSummaryDiffTasksNewOld. + + :param offset: The offset of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def query(self): + """Get the query of this TemplateSummaryDiffTasksNewOld. + + :return: The query of this TemplateSummaryDiffTasksNewOld. 
+ :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this TemplateSummaryDiffTasksNewOld. + + :param query: The query of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._query = query + + @property + def status(self): + """Get the status of this TemplateSummaryDiffTasksNewOld. + + :return: The status of this TemplateSummaryDiffTasksNewOld. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummaryDiffTasksNewOld. + + :param status: The status of this TemplateSummaryDiffTasksNewOld. + :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffTasksNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_telegraf_configs.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_telegraf_configs.py new file mode 100644 index 0000000..2aa8ef8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_telegraf_configs.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffTelegrafConfigs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TelegrafRequest', + 'old': 'TelegrafRequest' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffTelegrafConfigs - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffTelegrafConfigs. + + :return: The kind of this TemplateSummaryDiffTelegrafConfigs. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffTelegrafConfigs. + + :param kind: The kind of this TemplateSummaryDiffTelegrafConfigs. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffTelegrafConfigs. + + :return: The state_status of this TemplateSummaryDiffTelegrafConfigs. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffTelegrafConfigs. + + :param state_status: The state_status of this TemplateSummaryDiffTelegrafConfigs. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffTelegrafConfigs. + + :return: The id of this TemplateSummaryDiffTelegrafConfigs. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffTelegrafConfigs. + + :param id: The id of this TemplateSummaryDiffTelegrafConfigs. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffTelegrafConfigs. + + :return: The template_meta_name of this TemplateSummaryDiffTelegrafConfigs. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffTelegrafConfigs. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffTelegrafConfigs. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffTelegrafConfigs. + + :return: The new of this TemplateSummaryDiffTelegrafConfigs. + :rtype: TelegrafRequest + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffTelegrafConfigs. + + :param new: The new of this TemplateSummaryDiffTelegrafConfigs. 
+ :type: TelegrafRequest + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffTelegrafConfigs. + + :return: The old of this TemplateSummaryDiffTelegrafConfigs. + :rtype: TelegrafRequest + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffTelegrafConfigs. + + :param old: The old of this TemplateSummaryDiffTelegrafConfigs. + :type: TelegrafRequest + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffTelegrafConfigs): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables.py new file mode 100644 index 0000000..3d4712e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffVariables(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'state_status': 'str', + 'id': 'str', + 'template_meta_name': 'str', + 'new': 'TemplateSummaryDiffVariablesNewOld', + 'old': 'TemplateSummaryDiffVariablesNewOld' + } + + attribute_map = { + 'kind': 'kind', + 'state_status': 'stateStatus', + 'id': 'id', + 'template_meta_name': 'templateMetaName', + 'new': 'new', + 'old': 'old' + } + + def __init__(self, kind=None, state_status=None, id=None, template_meta_name=None, new=None, old=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffVariables - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._state_status = None + self._id = None + self._template_meta_name = None + self._new = None + self._old = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if state_status is not None: + self.state_status = state_status + if id is not None: + self.id = id + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if new is not None: + self.new = new + if old is not None: + self.old = old + + @property + def kind(self): + """Get the kind of this TemplateSummaryDiffVariables. + + :return: The kind of this TemplateSummaryDiffVariables. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryDiffVariables. + + :param kind: The kind of this TemplateSummaryDiffVariables. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def state_status(self): + """Get the state_status of this TemplateSummaryDiffVariables. + + :return: The state_status of this TemplateSummaryDiffVariables. + :rtype: str + """ # noqa: E501 + return self._state_status + + @state_status.setter + def state_status(self, state_status): + """Set the state_status of this TemplateSummaryDiffVariables. + + :param state_status: The state_status of this TemplateSummaryDiffVariables. + :type: str + """ # noqa: E501 + self._state_status = state_status + + @property + def id(self): + """Get the id of this TemplateSummaryDiffVariables. + + :return: The id of this TemplateSummaryDiffVariables. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryDiffVariables. + + :param id: The id of this TemplateSummaryDiffVariables. + :type: str + """ # noqa: E501 + self._id = id + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryDiffVariables. + + :return: The template_meta_name of this TemplateSummaryDiffVariables. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryDiffVariables. + + :param template_meta_name: The template_meta_name of this TemplateSummaryDiffVariables. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def new(self): + """Get the new of this TemplateSummaryDiffVariables. + + :return: The new of this TemplateSummaryDiffVariables. + :rtype: TemplateSummaryDiffVariablesNewOld + """ # noqa: E501 + return self._new + + @new.setter + def new(self, new): + """Set the new of this TemplateSummaryDiffVariables. + + :param new: The new of this TemplateSummaryDiffVariables. + :type: TemplateSummaryDiffVariablesNewOld + """ # noqa: E501 + self._new = new + + @property + def old(self): + """Get the old of this TemplateSummaryDiffVariables. 
+ + :return: The old of this TemplateSummaryDiffVariables. + :rtype: TemplateSummaryDiffVariablesNewOld + """ # noqa: E501 + return self._old + + @old.setter + def old(self, old): + """Set the old of this TemplateSummaryDiffVariables. + + :param old: The old of this TemplateSummaryDiffVariables. + :type: TemplateSummaryDiffVariablesNewOld + """ # noqa: E501 + self._old = old + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffVariables): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables_new_old.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables_new_old.py new file mode 100644 index 0000000..1537e5d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_diff_variables_new_old.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryDiffVariablesNewOld(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'description': 'str', + 'args': 'VariableProperties' + } + + attribute_map = { + 'name': 'name', + 'description': 'description', + 'args': 'args' + } + + def __init__(self, name=None, description=None, args=None): # noqa: E501,D401,D403 + """TemplateSummaryDiffVariablesNewOld - a model defined in OpenAPI.""" # noqa: E501 + self._name = None + self._description = None + self._args = None + self.discriminator = None + + if name is not None: + self.name = name + if description is not None: + self.description = description + if args is not None: + self.args = args + + @property + def name(self): + """Get the name of this TemplateSummaryDiffVariablesNewOld. + + :return: The name of this TemplateSummaryDiffVariablesNewOld. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryDiffVariablesNewOld. 
+ + :param name: The name of this TemplateSummaryDiffVariablesNewOld. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummaryDiffVariablesNewOld. + + :return: The description of this TemplateSummaryDiffVariablesNewOld. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryDiffVariablesNewOld. + + :param description: The description of this TemplateSummaryDiffVariablesNewOld. + :type: str + """ # noqa: E501 + self._description = description + + @property + def args(self): + """Get the args of this TemplateSummaryDiffVariablesNewOld. + + :return: The args of this TemplateSummaryDiffVariablesNewOld. + :rtype: VariableProperties + """ # noqa: E501 + return self._args + + @args.setter + def args(self, args): + """Set the args of this TemplateSummaryDiffVariablesNewOld. + + :param args: The args of this TemplateSummaryDiffVariablesNewOld. + :type: VariableProperties + """ # noqa: E501 + self._args = args + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryDiffVariablesNewOld): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_errors.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_errors.py new file mode 100644 index 0000000..3d41ef4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_errors.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryErrors(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'reason': 'str', + 'fields': 'list[str]', + 'indexes': 'list[int]' + } + + attribute_map = { + 'kind': 'kind', + 'reason': 'reason', + 'fields': 'fields', + 'indexes': 'indexes' + } + + def __init__(self, kind=None, reason=None, fields=None, indexes=None): # noqa: E501,D401,D403 + """TemplateSummaryErrors - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._reason = None + self._fields = None + self._indexes = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if reason is not None: + self.reason = reason + if fields is not None: + self.fields = fields + if indexes is not None: + self.indexes = indexes + + @property + def kind(self): + """Get the kind of this TemplateSummaryErrors. + + :return: The kind of this TemplateSummaryErrors. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryErrors. + + :param kind: The kind of this TemplateSummaryErrors. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def reason(self): + """Get the reason of this TemplateSummaryErrors. + + :return: The reason of this TemplateSummaryErrors. + :rtype: str + """ # noqa: E501 + return self._reason + + @reason.setter + def reason(self, reason): + """Set the reason of this TemplateSummaryErrors. + + :param reason: The reason of this TemplateSummaryErrors. + :type: str + """ # noqa: E501 + self._reason = reason + + @property + def fields(self): + """Get the fields of this TemplateSummaryErrors. + + :return: The fields of this TemplateSummaryErrors. + :rtype: list[str] + """ # noqa: E501 + return self._fields + + @fields.setter + def fields(self, fields): + """Set the fields of this TemplateSummaryErrors. + + :param fields: The fields of this TemplateSummaryErrors. + :type: list[str] + """ # noqa: E501 + self._fields = fields + + @property + def indexes(self): + """Get the indexes of this TemplateSummaryErrors. + + :return: The indexes of this TemplateSummaryErrors. + :rtype: list[int] + """ # noqa: E501 + return self._indexes + + @indexes.setter + def indexes(self, indexes): + """Set the indexes of this TemplateSummaryErrors. + + :param indexes: The indexes of this TemplateSummaryErrors. 
+ :type: list[int] + """ # noqa: E501 + self._indexes = indexes + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryErrors): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label.py new file mode 100644 index 0000000..727260a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label.py @@ -0,0 +1,245 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryLabel(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'name': 'str', + 'properties': 'TemplateSummaryLabelProperties', + 'env_references': 'list[object]' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'name': 'name', + 'properties': 'properties', + 'env_references': 'envReferences' + } + + def __init__(self, id=None, org_id=None, kind=None, template_meta_name=None, name=None, properties=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummaryLabel - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._kind = None + self._template_meta_name = None + self._name = None + self._properties = None + self._env_references = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if name is not None: + self.name = name + if properties is not None: + self.properties = properties + if env_references is not None: + self.env_references = env_references + + @property + def id(self): + """Get the id of this TemplateSummaryLabel. 
+ + :return: The id of this TemplateSummaryLabel. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummaryLabel. + + :param id: The id of this TemplateSummaryLabel. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this TemplateSummaryLabel. + + :return: The org_id of this TemplateSummaryLabel. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateSummaryLabel. + + :param org_id: The org_id of this TemplateSummaryLabel. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def kind(self): + """Get the kind of this TemplateSummaryLabel. + + :return: The kind of this TemplateSummaryLabel. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummaryLabel. + + :param kind: The kind of this TemplateSummaryLabel. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummaryLabel. + + :return: The template_meta_name of this TemplateSummaryLabel. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummaryLabel. + + :param template_meta_name: The template_meta_name of this TemplateSummaryLabel. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def name(self): + """Get the name of this TemplateSummaryLabel. + + :return: The name of this TemplateSummaryLabel. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummaryLabel. + + :param name: The name of this TemplateSummaryLabel. + :type: str + """ # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this TemplateSummaryLabel. + + :return: The properties of this TemplateSummaryLabel. + :rtype: TemplateSummaryLabelProperties + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this TemplateSummaryLabel. + + :param properties: The properties of this TemplateSummaryLabel. + :type: TemplateSummaryLabelProperties + """ # noqa: E501 + self._properties = properties + + @property + def env_references(self): + """Get the env_references of this TemplateSummaryLabel. + + :return: The env_references of this TemplateSummaryLabel. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummaryLabel. + + :param env_references: The env_references of this TemplateSummaryLabel. 
+ :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryLabel): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label_properties.py new file mode 100644 index 0000000..82bab1e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_label_properties.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummaryLabelProperties(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'color': 'str', + 'description': 'str' + } + + attribute_map = { + 'color': 'color', + 'description': 'description' + } + + def __init__(self, color=None, description=None): # noqa: E501,D401,D403 + """TemplateSummaryLabelProperties - a model defined in OpenAPI.""" # noqa: E501 + self._color = None + self._description = None + self.discriminator = None + + if color is not None: + self.color = color + if description is not None: + self.description = description + + @property + def color(self): + """Get the color of this TemplateSummaryLabelProperties. + + :return: The color of this TemplateSummaryLabelProperties. + :rtype: str + """ # noqa: E501 + return self._color + + @color.setter + def color(self, color): + """Set the color of this TemplateSummaryLabelProperties. + + :param color: The color of this TemplateSummaryLabelProperties. + :type: str + """ # noqa: E501 + self._color = color + + @property + def description(self): + """Get the description of this TemplateSummaryLabelProperties. + + :return: The description of this TemplateSummaryLabelProperties. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummaryLabelProperties. 
+ + :param description: The description of this TemplateSummaryLabelProperties. + :type: str + """ # noqa: E501 + self._description = description + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummaryLabelProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary.py new file mode 100644 index 0000000..5ca1002 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary.py @@ -0,0 +1,360 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummary(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'buckets': 'list[TemplateSummarySummaryBuckets]', + 'checks': 'list[CheckDiscriminator]', + 'dashboards': 'list[TemplateSummarySummaryDashboards]', + 'labels': 'list[TemplateSummaryLabel]', + 'label_mappings': 'list[TemplateSummarySummaryLabelMappings]', + 'missing_env_refs': 'list[str]', + 'missing_secrets': 'list[str]', + 'notification_endpoints': 'list[NotificationEndpointDiscriminator]', + 'notification_rules': 'list[TemplateSummarySummaryNotificationRules]', + 'tasks': 'list[TemplateSummarySummaryTasks]', + 'telegraf_configs': 'list[TelegrafRequest]', + 'variables': 'list[TemplateSummarySummaryVariables]' + } + + attribute_map = { + 'buckets': 'buckets', + 'checks': 'checks', + 'dashboards': 'dashboards', + 'labels': 'labels', + 'label_mappings': 'labelMappings', + 'missing_env_refs': 'missingEnvRefs', + 'missing_secrets': 'missingSecrets', + 'notification_endpoints': 'notificationEndpoints', + 'notification_rules': 'notificationRules', + 'tasks': 'tasks', + 'telegraf_configs': 'telegrafConfigs', + 'variables': 'variables' + } + + def __init__(self, buckets=None, checks=None, dashboards=None, labels=None, label_mappings=None, missing_env_refs=None, missing_secrets=None, notification_endpoints=None, notification_rules=None, tasks=None, telegraf_configs=None, variables=None): # noqa: E501,D401,D403 + """TemplateSummarySummary - a model defined in OpenAPI.""" # noqa: E501 + self._buckets = None + self._checks = None + self._dashboards = None + self._labels = None + self._label_mappings = None + self._missing_env_refs = None + self._missing_secrets = None + self._notification_endpoints = None + self._notification_rules = None + self._tasks = None + self._telegraf_configs = None + self._variables = None + self.discriminator = None + + if buckets is not None: + self.buckets = buckets + if checks is not None: + self.checks = checks + if dashboards is not None: + self.dashboards = dashboards + if labels is not None: + self.labels = labels + if label_mappings is not None: + self.label_mappings = label_mappings + if missing_env_refs is not None: + self.missing_env_refs = missing_env_refs + if missing_secrets is not None: + self.missing_secrets = missing_secrets + if notification_endpoints is not None: + self.notification_endpoints = notification_endpoints + if notification_rules is not None: + self.notification_rules = notification_rules + if tasks is not None: + self.tasks = tasks + if telegraf_configs is not None: + self.telegraf_configs = telegraf_configs + if variables is not None: + self.variables = variables + + @property + def buckets(self): + """Get the buckets of this TemplateSummarySummary. + + :return: The buckets of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryBuckets] + """ # noqa: E501 + return self._buckets + + @buckets.setter + def buckets(self, buckets): + """Set the buckets of this TemplateSummarySummary. + + :param buckets: The buckets of this TemplateSummarySummary. + :type: list[TemplateSummarySummaryBuckets] + """ # noqa: E501 + self._buckets = buckets + + @property + def checks(self): + """Get the checks of this TemplateSummarySummary. + + :return: The checks of this TemplateSummarySummary. + :rtype: list[CheckDiscriminator] + """ # noqa: E501 + return self._checks + + @checks.setter + def checks(self, checks): + """Set the checks of this TemplateSummarySummary. + + :param checks: The checks of this TemplateSummarySummary. 
+ :type: list[CheckDiscriminator] + """ # noqa: E501 + self._checks = checks + + @property + def dashboards(self): + """Get the dashboards of this TemplateSummarySummary. + + :return: The dashboards of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryDashboards] + """ # noqa: E501 + return self._dashboards + + @dashboards.setter + def dashboards(self, dashboards): + """Set the dashboards of this TemplateSummarySummary. + + :param dashboards: The dashboards of this TemplateSummarySummary. + :type: list[TemplateSummarySummaryDashboards] + """ # noqa: E501 + self._dashboards = dashboards + + @property + def labels(self): + """Get the labels of this TemplateSummarySummary. + + :return: The labels of this TemplateSummarySummary. + :rtype: list[TemplateSummaryLabel] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this TemplateSummarySummary. + + :param labels: The labels of this TemplateSummarySummary. + :type: list[TemplateSummaryLabel] + """ # noqa: E501 + self._labels = labels + + @property + def label_mappings(self): + """Get the label_mappings of this TemplateSummarySummary. + + :return: The label_mappings of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryLabelMappings] + """ # noqa: E501 + return self._label_mappings + + @label_mappings.setter + def label_mappings(self, label_mappings): + """Set the label_mappings of this TemplateSummarySummary. + + :param label_mappings: The label_mappings of this TemplateSummarySummary. + :type: list[TemplateSummarySummaryLabelMappings] + """ # noqa: E501 + self._label_mappings = label_mappings + + @property + def missing_env_refs(self): + """Get the missing_env_refs of this TemplateSummarySummary. + + :return: The missing_env_refs of this TemplateSummarySummary. + :rtype: list[str] + """ # noqa: E501 + return self._missing_env_refs + + @missing_env_refs.setter + def missing_env_refs(self, missing_env_refs): + """Set the missing_env_refs of this TemplateSummarySummary. + + :param missing_env_refs: The missing_env_refs of this TemplateSummarySummary. + :type: list[str] + """ # noqa: E501 + self._missing_env_refs = missing_env_refs + + @property + def missing_secrets(self): + """Get the missing_secrets of this TemplateSummarySummary. + + :return: The missing_secrets of this TemplateSummarySummary. + :rtype: list[str] + """ # noqa: E501 + return self._missing_secrets + + @missing_secrets.setter + def missing_secrets(self, missing_secrets): + """Set the missing_secrets of this TemplateSummarySummary. + + :param missing_secrets: The missing_secrets of this TemplateSummarySummary. + :type: list[str] + """ # noqa: E501 + self._missing_secrets = missing_secrets + + @property + def notification_endpoints(self): + """Get the notification_endpoints of this TemplateSummarySummary. + + :return: The notification_endpoints of this TemplateSummarySummary. + :rtype: list[NotificationEndpointDiscriminator] + """ # noqa: E501 + return self._notification_endpoints + + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints): + """Set the notification_endpoints of this TemplateSummarySummary. + + :param notification_endpoints: The notification_endpoints of this TemplateSummarySummary. + :type: list[NotificationEndpointDiscriminator] + """ # noqa: E501 + self._notification_endpoints = notification_endpoints + + @property + def notification_rules(self): + """Get the notification_rules of this TemplateSummarySummary. 
+ + :return: The notification_rules of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryNotificationRules] + """ # noqa: E501 + return self._notification_rules + + @notification_rules.setter + def notification_rules(self, notification_rules): + """Set the notification_rules of this TemplateSummarySummary. + + :param notification_rules: The notification_rules of this TemplateSummarySummary. + :type: list[TemplateSummarySummaryNotificationRules] + """ # noqa: E501 + self._notification_rules = notification_rules + + @property + def tasks(self): + """Get the tasks of this TemplateSummarySummary. + + :return: The tasks of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryTasks] + """ # noqa: E501 + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Set the tasks of this TemplateSummarySummary. + + :param tasks: The tasks of this TemplateSummarySummary. + :type: list[TemplateSummarySummaryTasks] + """ # noqa: E501 + self._tasks = tasks + + @property + def telegraf_configs(self): + """Get the telegraf_configs of this TemplateSummarySummary. + + :return: The telegraf_configs of this TemplateSummarySummary. + :rtype: list[TelegrafRequest] + """ # noqa: E501 + return self._telegraf_configs + + @telegraf_configs.setter + def telegraf_configs(self, telegraf_configs): + """Set the telegraf_configs of this TemplateSummarySummary. + + :param telegraf_configs: The telegraf_configs of this TemplateSummarySummary. + :type: list[TelegrafRequest] + """ # noqa: E501 + self._telegraf_configs = telegraf_configs + + @property + def variables(self): + """Get the variables of this TemplateSummarySummary. + + :return: The variables of this TemplateSummarySummary. + :rtype: list[TemplateSummarySummaryVariables] + """ # noqa: E501 + return self._variables + + @variables.setter + def variables(self, variables): + """Set the variables of this TemplateSummarySummary. + + :param variables: The variables of this TemplateSummarySummary. 
+ :type: list[TemplateSummarySummaryVariables] + """ # noqa: E501 + self._variables = variables + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummary): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_buckets.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_buckets.py new file mode 100644 index 0000000..ba43e8c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_buckets.py @@ -0,0 +1,291 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryBuckets(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'name': 'str', + 'description': 'str', + 'retention_period': 'int', + 'label_associations': 'list[TemplateSummaryLabel]', + 'env_references': 'list[object]' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'name': 'name', + 'description': 'description', + 'retention_period': 'retentionPeriod', + 'label_associations': 'labelAssociations', + 'env_references': 'envReferences' + } + + def __init__(self, id=None, org_id=None, kind=None, template_meta_name=None, name=None, description=None, retention_period=None, label_associations=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryBuckets - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._kind = None + self._template_meta_name = None + self._name = None + self._description = None + self._retention_period = None + self._label_associations = None + self._env_references = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if name is not None: + self.name = name + if description is not None: + self.description = description + if retention_period is not None: + self.retention_period = retention_period + if label_associations is not None: + self.label_associations = label_associations + if env_references is not None: + self.env_references = env_references + + @property + def id(self): + """Get the id of this TemplateSummarySummaryBuckets. + + :return: The id of this TemplateSummarySummaryBuckets. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummarySummaryBuckets. + + :param id: The id of this TemplateSummarySummaryBuckets. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this TemplateSummarySummaryBuckets. + + :return: The org_id of this TemplateSummarySummaryBuckets. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateSummarySummaryBuckets. + + :param org_id: The org_id of this TemplateSummarySummaryBuckets. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def kind(self): + """Get the kind of this TemplateSummarySummaryBuckets. + + :return: The kind of this TemplateSummarySummaryBuckets. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummarySummaryBuckets. + + :param kind: The kind of this TemplateSummarySummaryBuckets. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummarySummaryBuckets. + + :return: The template_meta_name of this TemplateSummarySummaryBuckets. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummarySummaryBuckets. + + :param template_meta_name: The template_meta_name of this TemplateSummarySummaryBuckets. 
+ :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def name(self): + """Get the name of this TemplateSummarySummaryBuckets. + + :return: The name of this TemplateSummarySummaryBuckets. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummarySummaryBuckets. + + :param name: The name of this TemplateSummarySummaryBuckets. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummarySummaryBuckets. + + :return: The description of this TemplateSummarySummaryBuckets. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummarySummaryBuckets. + + :param description: The description of this TemplateSummarySummaryBuckets. + :type: str + """ # noqa: E501 + self._description = description + + @property + def retention_period(self): + """Get the retention_period of this TemplateSummarySummaryBuckets. + + :return: The retention_period of this TemplateSummarySummaryBuckets. + :rtype: int + """ # noqa: E501 + return self._retention_period + + @retention_period.setter + def retention_period(self, retention_period): + """Set the retention_period of this TemplateSummarySummaryBuckets. + + :param retention_period: The retention_period of this TemplateSummarySummaryBuckets. + :type: int + """ # noqa: E501 + self._retention_period = retention_period + + @property + def label_associations(self): + """Get the label_associations of this TemplateSummarySummaryBuckets. + + :return: The label_associations of this TemplateSummarySummaryBuckets. + :rtype: list[TemplateSummaryLabel] + """ # noqa: E501 + return self._label_associations + + @label_associations.setter + def label_associations(self, label_associations): + """Set the label_associations of this TemplateSummarySummaryBuckets. + + :param label_associations: The label_associations of this TemplateSummarySummaryBuckets. + :type: list[TemplateSummaryLabel] + """ # noqa: E501 + self._label_associations = label_associations + + @property + def env_references(self): + """Get the env_references of this TemplateSummarySummaryBuckets. + + :return: The env_references of this TemplateSummarySummaryBuckets. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummarySummaryBuckets. + + :param env_references: The env_references of this TemplateSummarySummaryBuckets. 
+ :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryBuckets): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_dashboards.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_dashboards.py new file mode 100644 index 0000000..91a5484 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_dashboards.py @@ -0,0 +1,291 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryDashboards(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
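+
+ Illustrative note (hand-written, not generator output): attribute_map
+ translates the snake_case attribute names to the camelCase JSON keys
+ used by the API, e.g.:
+
+ >>> from influxdb_client.domain.template_summary_summary_dashboards import TemplateSummarySummaryDashboards
+ >>> TemplateSummarySummaryDashboards.attribute_map['template_meta_name']
+ 'templateMetaName'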
+ """ + openapi_types = { + 'id': 'str', + 'org_id': 'str', + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'name': 'str', + 'description': 'str', + 'label_associations': 'list[TemplateSummaryLabel]', + 'charts': 'list[TemplateChart]', + 'env_references': 'list[object]' + } + + attribute_map = { + 'id': 'id', + 'org_id': 'orgID', + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'name': 'name', + 'description': 'description', + 'label_associations': 'labelAssociations', + 'charts': 'charts', + 'env_references': 'envReferences' + } + + def __init__(self, id=None, org_id=None, kind=None, template_meta_name=None, name=None, description=None, label_associations=None, charts=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryDashboards - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._org_id = None + self._kind = None + self._template_meta_name = None + self._name = None + self._description = None + self._label_associations = None + self._charts = None + self._env_references = None + self.discriminator = None + + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if name is not None: + self.name = name + if description is not None: + self.description = description + if label_associations is not None: + self.label_associations = label_associations + if charts is not None: + self.charts = charts + if env_references is not None: + self.env_references = env_references + + @property + def id(self): + """Get the id of this TemplateSummarySummaryDashboards. + + :return: The id of this TemplateSummarySummaryDashboards. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummarySummaryDashboards. + + :param id: The id of this TemplateSummarySummaryDashboards. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this TemplateSummarySummaryDashboards. + + :return: The org_id of this TemplateSummarySummaryDashboards. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateSummarySummaryDashboards. + + :param org_id: The org_id of this TemplateSummarySummaryDashboards. + :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def kind(self): + """Get the kind of this TemplateSummarySummaryDashboards. + + :return: The kind of this TemplateSummarySummaryDashboards. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummarySummaryDashboards. + + :param kind: The kind of this TemplateSummarySummaryDashboards. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummarySummaryDashboards. + + :return: The template_meta_name of this TemplateSummarySummaryDashboards. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummarySummaryDashboards. + + :param template_meta_name: The template_meta_name of this TemplateSummarySummaryDashboards. 
+ :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def name(self): + """Get the name of this TemplateSummarySummaryDashboards. + + :return: The name of this TemplateSummarySummaryDashboards. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummarySummaryDashboards. + + :param name: The name of this TemplateSummarySummaryDashboards. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummarySummaryDashboards. + + :return: The description of this TemplateSummarySummaryDashboards. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummarySummaryDashboards. + + :param description: The description of this TemplateSummarySummaryDashboards. + :type: str + """ # noqa: E501 + self._description = description + + @property + def label_associations(self): + """Get the label_associations of this TemplateSummarySummaryDashboards. + + :return: The label_associations of this TemplateSummarySummaryDashboards. + :rtype: list[TemplateSummaryLabel] + """ # noqa: E501 + return self._label_associations + + @label_associations.setter + def label_associations(self, label_associations): + """Set the label_associations of this TemplateSummarySummaryDashboards. + + :param label_associations: The label_associations of this TemplateSummarySummaryDashboards. + :type: list[TemplateSummaryLabel] + """ # noqa: E501 + self._label_associations = label_associations + + @property + def charts(self): + """Get the charts of this TemplateSummarySummaryDashboards. + + :return: The charts of this TemplateSummarySummaryDashboards. + :rtype: list[TemplateChart] + """ # noqa: E501 + return self._charts + + @charts.setter + def charts(self, charts): + """Set the charts of this TemplateSummarySummaryDashboards. + + :param charts: The charts of this TemplateSummarySummaryDashboards. + :type: list[TemplateChart] + """ # noqa: E501 + self._charts = charts + + @property + def env_references(self): + """Get the env_references of this TemplateSummarySummaryDashboards. + + :return: The env_references of this TemplateSummarySummaryDashboards. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummarySummaryDashboards. + + :param env_references: The env_references of this TemplateSummarySummaryDashboards. 
+ :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryDashboards): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_label_mappings.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_label_mappings.py new file mode 100644 index 0000000..9051866 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_label_mappings.py @@ -0,0 +1,268 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryLabelMappings(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
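+
+ Illustrative equality check (a hand-written sketch; the values are
+ hypothetical). __eq__ compares the full attribute dict, so two
+ instances with identical fields compare equal:
+
+ >>> from influxdb_client.domain.template_summary_summary_label_mappings import TemplateSummarySummaryLabelMappings
+ >>> a = TemplateSummarySummaryLabelMappings(label_name='env')
+ >>> b = TemplateSummarySummaryLabelMappings(label_name='env')
+ >>> a == b
+ True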
+ """ + openapi_types = { + 'status': 'str', + 'resource_template_meta_name': 'str', + 'resource_name': 'str', + 'resource_id': 'str', + 'resource_type': 'str', + 'label_template_meta_name': 'str', + 'label_name': 'str', + 'label_id': 'str' + } + + attribute_map = { + 'status': 'status', + 'resource_template_meta_name': 'resourceTemplateMetaName', + 'resource_name': 'resourceName', + 'resource_id': 'resourceID', + 'resource_type': 'resourceType', + 'label_template_meta_name': 'labelTemplateMetaName', + 'label_name': 'labelName', + 'label_id': 'labelID' + } + + def __init__(self, status=None, resource_template_meta_name=None, resource_name=None, resource_id=None, resource_type=None, label_template_meta_name=None, label_name=None, label_id=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryLabelMappings - a model defined in OpenAPI.""" # noqa: E501 + self._status = None + self._resource_template_meta_name = None + self._resource_name = None + self._resource_id = None + self._resource_type = None + self._label_template_meta_name = None + self._label_name = None + self._label_id = None + self.discriminator = None + + if status is not None: + self.status = status + if resource_template_meta_name is not None: + self.resource_template_meta_name = resource_template_meta_name + if resource_name is not None: + self.resource_name = resource_name + if resource_id is not None: + self.resource_id = resource_id + if resource_type is not None: + self.resource_type = resource_type + if label_template_meta_name is not None: + self.label_template_meta_name = label_template_meta_name + if label_name is not None: + self.label_name = label_name + if label_id is not None: + self.label_id = label_id + + @property + def status(self): + """Get the status of this TemplateSummarySummaryLabelMappings. + + :return: The status of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummarySummaryLabelMappings. + + :param status: The status of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._status = status + + @property + def resource_template_meta_name(self): + """Get the resource_template_meta_name of this TemplateSummarySummaryLabelMappings. + + :return: The resource_template_meta_name of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_template_meta_name + + @resource_template_meta_name.setter + def resource_template_meta_name(self, resource_template_meta_name): + """Set the resource_template_meta_name of this TemplateSummarySummaryLabelMappings. + + :param resource_template_meta_name: The resource_template_meta_name of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._resource_template_meta_name = resource_template_meta_name + + @property + def resource_name(self): + """Get the resource_name of this TemplateSummarySummaryLabelMappings. + + :return: The resource_name of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_name + + @resource_name.setter + def resource_name(self, resource_name): + """Set the resource_name of this TemplateSummarySummaryLabelMappings. + + :param resource_name: The resource_name of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._resource_name = resource_name + + @property + def resource_id(self): + """Get the resource_id of this TemplateSummarySummaryLabelMappings. 
+ + :return: The resource_id of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_id + + @resource_id.setter + def resource_id(self, resource_id): + """Set the resource_id of this TemplateSummarySummaryLabelMappings. + + :param resource_id: The resource_id of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._resource_id = resource_id + + @property + def resource_type(self): + """Get the resource_type of this TemplateSummarySummaryLabelMappings. + + :return: The resource_type of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._resource_type + + @resource_type.setter + def resource_type(self, resource_type): + """Set the resource_type of this TemplateSummarySummaryLabelMappings. + + :param resource_type: The resource_type of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._resource_type = resource_type + + @property + def label_template_meta_name(self): + """Get the label_template_meta_name of this TemplateSummarySummaryLabelMappings. + + :return: The label_template_meta_name of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_template_meta_name + + @label_template_meta_name.setter + def label_template_meta_name(self, label_template_meta_name): + """Set the label_template_meta_name of this TemplateSummarySummaryLabelMappings. + + :param label_template_meta_name: The label_template_meta_name of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._label_template_meta_name = label_template_meta_name + + @property + def label_name(self): + """Get the label_name of this TemplateSummarySummaryLabelMappings. + + :return: The label_name of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_name + + @label_name.setter + def label_name(self, label_name): + """Set the label_name of this TemplateSummarySummaryLabelMappings. + + :param label_name: The label_name of this TemplateSummarySummaryLabelMappings. + :type: str + """ # noqa: E501 + self._label_name = label_name + + @property + def label_id(self): + """Get the label_id of this TemplateSummarySummaryLabelMappings. + + :return: The label_id of this TemplateSummarySummaryLabelMappings. + :rtype: str + """ # noqa: E501 + return self._label_id + + @label_id.setter + def label_id(self, label_id): + """Set the label_id of this TemplateSummarySummaryLabelMappings. + + :param label_id: The label_id of this TemplateSummarySummaryLabelMappings. 
+ :type: str + """ # noqa: E501 + self._label_id = label_id + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryLabelMappings): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_notification_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_notification_rules.py new file mode 100644 index 0000000..63168ed --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_notification_rules.py @@ -0,0 +1,429 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryNotificationRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
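+
+ Illustrative sketch (hand-written, not generator output; the values
+ are hypothetical). to_dict() recurses into nested model lists such as
+ status_rules:
+
+ >>> from influxdb_client.domain.template_summary_summary_notification_rules import TemplateSummarySummaryNotificationRules
+ >>> from influxdb_client.domain.template_summary_summary_status_rules import TemplateSummarySummaryStatusRules
+ >>> rule = TemplateSummarySummaryNotificationRules(status_rules=[TemplateSummarySummaryStatusRules(current_level='CRIT')])
+ >>> rule.to_dict()['status_rules']
+ [{'current_level': 'CRIT', 'previous_level': None}]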
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'name': 'str', + 'description': 'str', + 'endpoint_template_meta_name': 'str', + 'endpoint_id': 'str', + 'endpoint_type': 'str', + 'every': 'str', + 'offset': 'str', + 'message_template': 'str', + 'status': 'str', + 'status_rules': 'list[TemplateSummarySummaryStatusRules]', + 'tag_rules': 'list[TemplateSummarySummaryTagRules]', + 'label_associations': 'list[TemplateSummaryLabel]', + 'env_references': 'list[object]' + } + + attribute_map = { + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'name': 'name', + 'description': 'description', + 'endpoint_template_meta_name': 'endpointTemplateMetaName', + 'endpoint_id': 'endpointID', + 'endpoint_type': 'endpointType', + 'every': 'every', + 'offset': 'offset', + 'message_template': 'messageTemplate', + 'status': 'status', + 'status_rules': 'statusRules', + 'tag_rules': 'tagRules', + 'label_associations': 'labelAssociations', + 'env_references': 'envReferences' + } + + def __init__(self, kind=None, template_meta_name=None, name=None, description=None, endpoint_template_meta_name=None, endpoint_id=None, endpoint_type=None, every=None, offset=None, message_template=None, status=None, status_rules=None, tag_rules=None, label_associations=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryNotificationRules - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._template_meta_name = None + self._name = None + self._description = None + self._endpoint_template_meta_name = None + self._endpoint_id = None + self._endpoint_type = None + self._every = None + self._offset = None + self._message_template = None + self._status = None + self._status_rules = None + self._tag_rules = None + self._label_associations = None + self._env_references = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if name is not None: + self.name = name + if description is not None: + self.description = description + if endpoint_template_meta_name is not None: + self.endpoint_template_meta_name = endpoint_template_meta_name + if endpoint_id is not None: + self.endpoint_id = endpoint_id + if endpoint_type is not None: + self.endpoint_type = endpoint_type + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if message_template is not None: + self.message_template = message_template + if status is not None: + self.status = status + if status_rules is not None: + self.status_rules = status_rules + if tag_rules is not None: + self.tag_rules = tag_rules + if label_associations is not None: + self.label_associations = label_associations + if env_references is not None: + self.env_references = env_references + + @property + def kind(self): + """Get the kind of this TemplateSummarySummaryNotificationRules. + + :return: The kind of this TemplateSummarySummaryNotificationRules. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummarySummaryNotificationRules. + + :param kind: The kind of this TemplateSummarySummaryNotificationRules. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummarySummaryNotificationRules. + + :return: The template_meta_name of this TemplateSummarySummaryNotificationRules. 
+ :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummarySummaryNotificationRules. + + :param template_meta_name: The template_meta_name of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def name(self): + """Get the name of this TemplateSummarySummaryNotificationRules. + + :return: The name of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummarySummaryNotificationRules. + + :param name: The name of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummarySummaryNotificationRules. + + :return: The description of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummarySummaryNotificationRules. + + :param description: The description of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._description = description + + @property + def endpoint_template_meta_name(self): + """Get the endpoint_template_meta_name of this TemplateSummarySummaryNotificationRules. + + :return: The endpoint_template_meta_name of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._endpoint_template_meta_name + + @endpoint_template_meta_name.setter + def endpoint_template_meta_name(self, endpoint_template_meta_name): + """Set the endpoint_template_meta_name of this TemplateSummarySummaryNotificationRules. + + :param endpoint_template_meta_name: The endpoint_template_meta_name of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._endpoint_template_meta_name = endpoint_template_meta_name + + @property + def endpoint_id(self): + """Get the endpoint_id of this TemplateSummarySummaryNotificationRules. + + :return: The endpoint_id of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._endpoint_id + + @endpoint_id.setter + def endpoint_id(self, endpoint_id): + """Set the endpoint_id of this TemplateSummarySummaryNotificationRules. + + :param endpoint_id: The endpoint_id of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._endpoint_id = endpoint_id + + @property + def endpoint_type(self): + """Get the endpoint_type of this TemplateSummarySummaryNotificationRules. + + :return: The endpoint_type of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._endpoint_type + + @endpoint_type.setter + def endpoint_type(self, endpoint_type): + """Set the endpoint_type of this TemplateSummarySummaryNotificationRules. + + :param endpoint_type: The endpoint_type of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._endpoint_type = endpoint_type + + @property + def every(self): + """Get the every of this TemplateSummarySummaryNotificationRules. + + :return: The every of this TemplateSummarySummaryNotificationRules. 
+ :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this TemplateSummarySummaryNotificationRules. + + :param every: The every of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this TemplateSummarySummaryNotificationRules. + + :return: The offset of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this TemplateSummarySummaryNotificationRules. + + :param offset: The offset of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def message_template(self): + """Get the message_template of this TemplateSummarySummaryNotificationRules. + + :return: The message_template of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._message_template + + @message_template.setter + def message_template(self, message_template): + """Set the message_template of this TemplateSummarySummaryNotificationRules. + + :param message_template: The message_template of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._message_template = message_template + + @property + def status(self): + """Get the status of this TemplateSummarySummaryNotificationRules. + + :return: The status of this TemplateSummarySummaryNotificationRules. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummarySummaryNotificationRules. + + :param status: The status of this TemplateSummarySummaryNotificationRules. + :type: str + """ # noqa: E501 + self._status = status + + @property + def status_rules(self): + """Get the status_rules of this TemplateSummarySummaryNotificationRules. + + :return: The status_rules of this TemplateSummarySummaryNotificationRules. + :rtype: list[TemplateSummarySummaryStatusRules] + """ # noqa: E501 + return self._status_rules + + @status_rules.setter + def status_rules(self, status_rules): + """Set the status_rules of this TemplateSummarySummaryNotificationRules. + + :param status_rules: The status_rules of this TemplateSummarySummaryNotificationRules. + :type: list[TemplateSummarySummaryStatusRules] + """ # noqa: E501 + self._status_rules = status_rules + + @property + def tag_rules(self): + """Get the tag_rules of this TemplateSummarySummaryNotificationRules. + + :return: The tag_rules of this TemplateSummarySummaryNotificationRules. + :rtype: list[TemplateSummarySummaryTagRules] + """ # noqa: E501 + return self._tag_rules + + @tag_rules.setter + def tag_rules(self, tag_rules): + """Set the tag_rules of this TemplateSummarySummaryNotificationRules. + + :param tag_rules: The tag_rules of this TemplateSummarySummaryNotificationRules. + :type: list[TemplateSummarySummaryTagRules] + """ # noqa: E501 + self._tag_rules = tag_rules + + @property + def label_associations(self): + """Get the label_associations of this TemplateSummarySummaryNotificationRules. + + :return: The label_associations of this TemplateSummarySummaryNotificationRules. + :rtype: list[TemplateSummaryLabel] + """ # noqa: E501 + return self._label_associations + + @label_associations.setter + def label_associations(self, label_associations): + """Set the label_associations of this TemplateSummarySummaryNotificationRules. 
+ + :param label_associations: The label_associations of this TemplateSummarySummaryNotificationRules. + :type: list[TemplateSummaryLabel] + """ # noqa: E501 + self._label_associations = label_associations + + @property + def env_references(self): + """Get the env_references of this TemplateSummarySummaryNotificationRules. + + :return: The env_references of this TemplateSummarySummaryNotificationRules. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummarySummaryNotificationRules. + + :param env_references: The env_references of this TemplateSummarySummaryNotificationRules. + :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryNotificationRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_status_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_status_rules.py new file mode 100644 index 0000000..90a7c5a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_status_rules.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryStatusRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
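+
+ Illustrative sketch (hand-written; the level values are hypothetical).
+ __repr__ delegates to to_str(), which pretty-prints to_dict():
+
+ >>> from influxdb_client.domain.template_summary_summary_status_rules import TemplateSummarySummaryStatusRules
+ >>> print(TemplateSummarySummaryStatusRules(current_level='CRIT', previous_level='OK'))
+ {'current_level': 'CRIT', 'previous_level': 'OK'}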
+ """ + openapi_types = { + 'current_level': 'str', + 'previous_level': 'str' + } + + attribute_map = { + 'current_level': 'currentLevel', + 'previous_level': 'previousLevel' + } + + def __init__(self, current_level=None, previous_level=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryStatusRules - a model defined in OpenAPI.""" # noqa: E501 + self._current_level = None + self._previous_level = None + self.discriminator = None + + if current_level is not None: + self.current_level = current_level + if previous_level is not None: + self.previous_level = previous_level + + @property + def current_level(self): + """Get the current_level of this TemplateSummarySummaryStatusRules. + + :return: The current_level of this TemplateSummarySummaryStatusRules. + :rtype: str + """ # noqa: E501 + return self._current_level + + @current_level.setter + def current_level(self, current_level): + """Set the current_level of this TemplateSummarySummaryStatusRules. + + :param current_level: The current_level of this TemplateSummarySummaryStatusRules. + :type: str + """ # noqa: E501 + self._current_level = current_level + + @property + def previous_level(self): + """Get the previous_level of this TemplateSummarySummaryStatusRules. + + :return: The previous_level of this TemplateSummarySummaryStatusRules. + :rtype: str + """ # noqa: E501 + return self._previous_level + + @previous_level.setter + def previous_level(self, previous_level): + """Set the previous_level of this TemplateSummarySummaryStatusRules. + + :param previous_level: The previous_level of this TemplateSummarySummaryStatusRules. + :type: str + """ # noqa: E501 + self._previous_level = previous_level + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryStatusRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tag_rules.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tag_rules.py new file mode 100644 index 0000000..3c1a447 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tag_rules.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryTagRules(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'key': 'str', + 'value': 'str', + 'operator': 'str' + } + + attribute_map = { + 'key': 'key', + 'value': 'value', + 'operator': 'operator' + } + + def __init__(self, key=None, value=None, operator=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryTagRules - a model defined in OpenAPI.""" # noqa: E501 + self._key = None + self._value = None + self._operator = None + self.discriminator = None + + if key is not None: + self.key = key + if value is not None: + self.value = value + if operator is not None: + self.operator = operator + + @property + def key(self): + """Get the key of this TemplateSummarySummaryTagRules. + + :return: The key of this TemplateSummarySummaryTagRules. + :rtype: str + """ # noqa: E501 + return self._key + + @key.setter + def key(self, key): + """Set the key of this TemplateSummarySummaryTagRules. + + :param key: The key of this TemplateSummarySummaryTagRules. + :type: str + """ # noqa: E501 + self._key = key + + @property + def value(self): + """Get the value of this TemplateSummarySummaryTagRules. + + :return: The value of this TemplateSummarySummaryTagRules. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this TemplateSummarySummaryTagRules. + + :param value: The value of this TemplateSummarySummaryTagRules. + :type: str + """ # noqa: E501 + self._value = value + + @property + def operator(self): + """Get the operator of this TemplateSummarySummaryTagRules. + + :return: The operator of this TemplateSummarySummaryTagRules. + :rtype: str + """ # noqa: E501 + return self._operator + + @operator.setter + def operator(self, operator): + """Set the operator of this TemplateSummarySummaryTagRules. + + :param operator: The operator of this TemplateSummarySummaryTagRules. 
+ :type: str + """ # noqa: E501 + self._operator = operator + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryTagRules): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tasks.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tasks.py new file mode 100644 index 0000000..7b8db75 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_tasks.py @@ -0,0 +1,337 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryTasks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
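+
+ Illustrative sketch (hand-written, not generator output; the task
+ names are hypothetical). __ne__ is the negation of __eq__:
+
+ >>> from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks
+ >>> TemplateSummarySummaryTasks(name='task-a') != TemplateSummarySummaryTasks(name='task-b')
+ True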
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'id': 'str', + 'name': 'str', + 'cron': 'str', + 'description': 'str', + 'every': 'str', + 'offset': 'str', + 'query': 'str', + 'status': 'str', + 'env_references': 'list[object]' + } + + attribute_map = { + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'id': 'id', + 'name': 'name', + 'cron': 'cron', + 'description': 'description', + 'every': 'every', + 'offset': 'offset', + 'query': 'query', + 'status': 'status', + 'env_references': 'envReferences' + } + + def __init__(self, kind=None, template_meta_name=None, id=None, name=None, cron=None, description=None, every=None, offset=None, query=None, status=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryTasks - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._template_meta_name = None + self._id = None + self._name = None + self._cron = None + self._description = None + self._every = None + self._offset = None + self._query = None + self._status = None + self._env_references = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if id is not None: + self.id = id + if name is not None: + self.name = name + if cron is not None: + self.cron = cron + if description is not None: + self.description = description + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if query is not None: + self.query = query + if status is not None: + self.status = status + if env_references is not None: + self.env_references = env_references + + @property + def kind(self): + """Get the kind of this TemplateSummarySummaryTasks. + + :return: The kind of this TemplateSummarySummaryTasks. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummarySummaryTasks. + + :param kind: The kind of this TemplateSummarySummaryTasks. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummarySummaryTasks. + + :return: The template_meta_name of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummarySummaryTasks. + + :param template_meta_name: The template_meta_name of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def id(self): + """Get the id of this TemplateSummarySummaryTasks. + + :return: The id of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummarySummaryTasks. + + :param id: The id of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this TemplateSummarySummaryTasks. + + :return: The name of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummarySummaryTasks. + + :param name: The name of this TemplateSummarySummaryTasks. 
+ :type: str + """ # noqa: E501 + self._name = name + + @property + def cron(self): + """Get the cron of this TemplateSummarySummaryTasks. + + :return: The cron of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._cron + + @cron.setter + def cron(self, cron): + """Set the cron of this TemplateSummarySummaryTasks. + + :param cron: The cron of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._cron = cron + + @property + def description(self): + """Get the description of this TemplateSummarySummaryTasks. + + :return: The description of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummarySummaryTasks. + + :param description: The description of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._description = description + + @property + def every(self): + """Get the every of this TemplateSummarySummaryTasks. + + :return: The every of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this TemplateSummarySummaryTasks. + + :param every: The every of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this TemplateSummarySummaryTasks. + + :return: The offset of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this TemplateSummarySummaryTasks. + + :param offset: The offset of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def query(self): + """Get the query of this TemplateSummarySummaryTasks. + + :return: The query of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._query + + @query.setter + def query(self, query): + """Set the query of this TemplateSummarySummaryTasks. + + :param query: The query of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._query = query + + @property + def status(self): + """Get the status of this TemplateSummarySummaryTasks. + + :return: The status of this TemplateSummarySummaryTasks. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this TemplateSummarySummaryTasks. + + :param status: The status of this TemplateSummarySummaryTasks. + :type: str + """ # noqa: E501 + self._status = status + + @property + def env_references(self): + """Get the env_references of this TemplateSummarySummaryTasks. + + :return: The env_references of this TemplateSummarySummaryTasks. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummarySummaryTasks. + + :param env_references: The env_references of this TemplateSummarySummaryTasks. 
+ :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryTasks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_variables.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_variables.py new file mode 100644 index 0000000..458ea47 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/template_summary_summary_variables.py @@ -0,0 +1,291 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class TemplateSummarySummaryVariables(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
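+
+ Illustrative sketch (hand-written): every constructor argument is
+ optional, and unset attributes stay None:
+
+ >>> from influxdb_client.domain.template_summary_summary_variables import TemplateSummarySummaryVariables
+ >>> TemplateSummarySummaryVariables().name is None
+ True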
+ """ + openapi_types = { + 'kind': 'TemplateKind', + 'template_meta_name': 'str', + 'id': 'str', + 'org_id': 'str', + 'name': 'str', + 'description': 'str', + 'arguments': 'VariableProperties', + 'label_associations': 'list[TemplateSummaryLabel]', + 'env_references': 'list[object]' + } + + attribute_map = { + 'kind': 'kind', + 'template_meta_name': 'templateMetaName', + 'id': 'id', + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description', + 'arguments': 'arguments', + 'label_associations': 'labelAssociations', + 'env_references': 'envReferences' + } + + def __init__(self, kind=None, template_meta_name=None, id=None, org_id=None, name=None, description=None, arguments=None, label_associations=None, env_references=None): # noqa: E501,D401,D403 + """TemplateSummarySummaryVariables - a model defined in OpenAPI.""" # noqa: E501 + self._kind = None + self._template_meta_name = None + self._id = None + self._org_id = None + self._name = None + self._description = None + self._arguments = None + self._label_associations = None + self._env_references = None + self.discriminator = None + + if kind is not None: + self.kind = kind + if template_meta_name is not None: + self.template_meta_name = template_meta_name + if id is not None: + self.id = id + if org_id is not None: + self.org_id = org_id + if name is not None: + self.name = name + if description is not None: + self.description = description + if arguments is not None: + self.arguments = arguments + if label_associations is not None: + self.label_associations = label_associations + if env_references is not None: + self.env_references = env_references + + @property + def kind(self): + """Get the kind of this TemplateSummarySummaryVariables. + + :return: The kind of this TemplateSummarySummaryVariables. + :rtype: TemplateKind + """ # noqa: E501 + return self._kind + + @kind.setter + def kind(self, kind): + """Set the kind of this TemplateSummarySummaryVariables. + + :param kind: The kind of this TemplateSummarySummaryVariables. + :type: TemplateKind + """ # noqa: E501 + self._kind = kind + + @property + def template_meta_name(self): + """Get the template_meta_name of this TemplateSummarySummaryVariables. + + :return: The template_meta_name of this TemplateSummarySummaryVariables. + :rtype: str + """ # noqa: E501 + return self._template_meta_name + + @template_meta_name.setter + def template_meta_name(self, template_meta_name): + """Set the template_meta_name of this TemplateSummarySummaryVariables. + + :param template_meta_name: The template_meta_name of this TemplateSummarySummaryVariables. + :type: str + """ # noqa: E501 + self._template_meta_name = template_meta_name + + @property + def id(self): + """Get the id of this TemplateSummarySummaryVariables. + + :return: The id of this TemplateSummarySummaryVariables. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this TemplateSummarySummaryVariables. + + :param id: The id of this TemplateSummarySummaryVariables. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this TemplateSummarySummaryVariables. + + :return: The org_id of this TemplateSummarySummaryVariables. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this TemplateSummarySummaryVariables. + + :param org_id: The org_id of this TemplateSummarySummaryVariables. 
+ :type: str + """ # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this TemplateSummarySummaryVariables. + + :return: The name of this TemplateSummarySummaryVariables. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this TemplateSummarySummaryVariables. + + :param name: The name of this TemplateSummarySummaryVariables. + :type: str + """ # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this TemplateSummarySummaryVariables. + + :return: The description of this TemplateSummarySummaryVariables. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this TemplateSummarySummaryVariables. + + :param description: The description of this TemplateSummarySummaryVariables. + :type: str + """ # noqa: E501 + self._description = description + + @property + def arguments(self): + """Get the arguments of this TemplateSummarySummaryVariables. + + :return: The arguments of this TemplateSummarySummaryVariables. + :rtype: VariableProperties + """ # noqa: E501 + return self._arguments + + @arguments.setter + def arguments(self, arguments): + """Set the arguments of this TemplateSummarySummaryVariables. + + :param arguments: The arguments of this TemplateSummarySummaryVariables. + :type: VariableProperties + """ # noqa: E501 + self._arguments = arguments + + @property + def label_associations(self): + """Get the label_associations of this TemplateSummarySummaryVariables. + + :return: The label_associations of this TemplateSummarySummaryVariables. + :rtype: list[TemplateSummaryLabel] + """ # noqa: E501 + return self._label_associations + + @label_associations.setter + def label_associations(self, label_associations): + """Set the label_associations of this TemplateSummarySummaryVariables. + + :param label_associations: The label_associations of this TemplateSummarySummaryVariables. + :type: list[TemplateSummaryLabel] + """ # noqa: E501 + self._label_associations = label_associations + + @property + def env_references(self): + """Get the env_references of this TemplateSummarySummaryVariables. + + :return: The env_references of this TemplateSummarySummaryVariables. + :rtype: list[object] + """ # noqa: E501 + return self._env_references + + @env_references.setter + def env_references(self, env_references): + """Set the env_references of this TemplateSummarySummaryVariables. + + :param env_references: The env_references of this TemplateSummarySummaryVariables. 
+ :type: list[object] + """ # noqa: E501 + self._env_references = env_references + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TemplateSummarySummaryVariables): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/test_statement.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/test_statement.py new file mode 100644 index 0000000..e957536 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/test_statement.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class TestStatement(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'assignment': 'VariableAssignment' + } + + attribute_map = { + 'type': 'type', + 'assignment': 'assignment' + } + + def __init__(self, type=None, assignment=None): # noqa: E501,D401,D403 + """TestStatement - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._assignment = None + self.discriminator = None + + if type is not None: + self.type = type + if assignment is not None: + self.assignment = assignment + + @property + def type(self): + """Get the type of this TestStatement. + + Type of AST node + + :return: The type of this TestStatement. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this TestStatement. + + Type of AST node + + :param type: The type of this TestStatement. + :type: str + """ # noqa: E501 + self._type = type + + @property + def assignment(self): + """Get the assignment of this TestStatement. + + :return: The assignment of this TestStatement. + :rtype: VariableAssignment + """ # noqa: E501 + return self._assignment + + @assignment.setter + def assignment(self, assignment): + """Set the assignment of this TestStatement. + + :param assignment: The assignment of this TestStatement. 
+ :type: VariableAssignment + """ # noqa: E501 + self._assignment = assignment + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, TestStatement): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold.py new file mode 100644 index 0000000..0999dc7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Threshold(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + } + + attribute_map = { + 'type': 'type', + } + + discriminator_value_class_map = { + 'greater': 'GreaterThreshold', + 'lesser': 'LesserThreshold', + 'range': 'RangeThreshold' + } + + def __init__(self, type=None): # noqa: E501,D401,D403 + """Threshold - a model defined in OpenAPI.""" # noqa: E501 + self._type = None + self.discriminator = 'type' + + self.type = type + + @property + def type(self): + """Get the type of this Threshold. + + :return: The type of this Threshold. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this Threshold. + + :param type: The type of this Threshold. 
+ :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + def get_real_child_model(self, data): + """Return the real base class specified by the discriminator.""" + discriminator_key = self.attribute_map[self.discriminator] + discriminator_value = data[discriminator_key] + return self.discriminator_value_class_map.get(discriminator_value) + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Threshold): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_base.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_base.py new file mode 100644 index 0000000..dad715c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_base.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ThresholdBase(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'level': 'CheckStatusLevel', + 'all_values': 'bool' + } + + attribute_map = { + 'level': 'level', + 'all_values': 'allValues' + } + + def __init__(self, level=None, all_values=None): # noqa: E501,D401,D403 + """ThresholdBase - a model defined in OpenAPI.""" # noqa: E501 + self._level = None + self._all_values = None + self.discriminator = None + + if level is not None: + self.level = level + if all_values is not None: + self.all_values = all_values + + @property + def level(self): + """Get the level of this ThresholdBase. + + :return: The level of this ThresholdBase. + :rtype: CheckStatusLevel + """ # noqa: E501 + return self._level + + @level.setter + def level(self, level): + """Set the level of this ThresholdBase. + + :param level: The level of this ThresholdBase. + :type: CheckStatusLevel + """ # noqa: E501 + self._level = level + + @property + def all_values(self): + """Get the all_values of this ThresholdBase. + + If true, only alert if all values meet threshold. 
+ + :return: The all_values of this ThresholdBase. + :rtype: bool + """ # noqa: E501 + return self._all_values + + @all_values.setter + def all_values(self, all_values): + """Set the all_values of this ThresholdBase. + + If true, only alert if all values meet threshold. + + :param all_values: The all_values of this ThresholdBase. + :type: bool + """ # noqa: E501 + self._all_values = all_values + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ThresholdBase): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_check.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_check.py new file mode 100644 index 0000000..30504cb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/threshold_check.py @@ -0,0 +1,273 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + + +class ThresholdCheck(CheckDiscriminator): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'type': 'str', + 'thresholds': 'list[Threshold]', + 'every': 'str', + 'offset': 'str', + 'tags': 'list[object]', + 'status_message_template': 'str', + 'id': 'str', + 'name': 'str', + 'org_id': 'str', + 'task_id': 'str', + 'owner_id': 'str', + 'created_at': 'datetime', + 'updated_at': 'datetime', + 'query': 'DashboardQuery', + 'status': 'TaskStatusType', + 'description': 'str', + 'latest_completed': 'datetime', + 'last_run_status': 'str', + 'last_run_error': 'str', + 'labels': 'list[Label]', + 'links': 'CheckBaseLinks' + } + + attribute_map = { + 'type': 'type', + 'thresholds': 'thresholds', + 'every': 'every', + 'offset': 'offset', + 'tags': 'tags', + 'status_message_template': 'statusMessageTemplate', + 'id': 'id', + 'name': 'name', + 'org_id': 'orgID', + 'task_id': 'taskID', + 'owner_id': 'ownerID', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt', + 'query': 'query', + 'status': 'status', + 'description': 'description', + 'latest_completed': 'latestCompleted', + 'last_run_status': 'lastRunStatus', + 'last_run_error': 'lastRunError', + 'labels': 'labels', + 'links': 'links' + } + + def __init__(self, type="threshold", thresholds=None, every=None, offset=None, tags=None, status_message_template=None, id=None, name=None, org_id=None, task_id=None, owner_id=None, created_at=None, updated_at=None, query=None, status=None, description=None, latest_completed=None, last_run_status=None, last_run_error=None, labels=None, links=None): # noqa: E501,D401,D403 + """ThresholdCheck - a model defined in OpenAPI.""" # noqa: E501 + CheckDiscriminator.__init__(self, id=id, name=name, org_id=org_id, task_id=task_id, owner_id=owner_id, created_at=created_at, updated_at=updated_at, query=query, status=status, description=description, latest_completed=latest_completed, last_run_status=last_run_status, last_run_error=last_run_error, labels=labels, links=links) # noqa: E501 + + self._type = None + self._thresholds = None + self._every = None + self._offset = None + self._tags = None + self._status_message_template = None + self.discriminator = None + + self.type = type + if thresholds is not None: + self.thresholds = thresholds + if every is not None: + self.every = every + if offset is not None: + self.offset = offset + if tags is not None: + self.tags = tags + if status_message_template is not None: + self.status_message_template = status_message_template + + @property + def type(self): + """Get the type of this ThresholdCheck. + + :return: The type of this ThresholdCheck. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this ThresholdCheck. + + :param type: The type of this ThresholdCheck. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def thresholds(self): + """Get the thresholds of this ThresholdCheck. + + :return: The thresholds of this ThresholdCheck. + :rtype: list[Threshold] + """ # noqa: E501 + return self._thresholds + + @thresholds.setter + def thresholds(self, thresholds): + """Set the thresholds of this ThresholdCheck. + + :param thresholds: The thresholds of this ThresholdCheck. + :type: list[Threshold] + """ # noqa: E501 + self._thresholds = thresholds + + @property + def every(self): + """Get the every of this ThresholdCheck. + + Check repetition interval. + + :return: The every of this ThresholdCheck. 
+ :rtype: str + """ # noqa: E501 + return self._every + + @every.setter + def every(self, every): + """Set the every of this ThresholdCheck. + + Check repetition interval. + + :param every: The every of this ThresholdCheck. + :type: str + """ # noqa: E501 + self._every = every + + @property + def offset(self): + """Get the offset of this ThresholdCheck. + + Duration to delay after the schedule, before executing check. + + :return: The offset of this ThresholdCheck. + :rtype: str + """ # noqa: E501 + return self._offset + + @offset.setter + def offset(self, offset): + """Set the offset of this ThresholdCheck. + + Duration to delay after the schedule, before executing check. + + :param offset: The offset of this ThresholdCheck. + :type: str + """ # noqa: E501 + self._offset = offset + + @property + def tags(self): + """Get the tags of this ThresholdCheck. + + List of tags to write to each status. + + :return: The tags of this ThresholdCheck. + :rtype: list[object] + """ # noqa: E501 + return self._tags + + @tags.setter + def tags(self, tags): + """Set the tags of this ThresholdCheck. + + List of tags to write to each status. + + :param tags: The tags of this ThresholdCheck. + :type: list[object] + """ # noqa: E501 + self._tags = tags + + @property + def status_message_template(self): + """Get the status_message_template of this ThresholdCheck. + + The template used to generate and write a status message. + + :return: The status_message_template of this ThresholdCheck. + :rtype: str + """ # noqa: E501 + return self._status_message_template + + @status_message_template.setter + def status_message_template(self, status_message_template): + """Set the status_message_template of this ThresholdCheck. + + The template used to generate and write a status message. + + :param status_message_template: The status_message_template of this ThresholdCheck. + :type: str + """ # noqa: E501 + self._status_message_template = status_message_template + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ThresholdCheck): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/unary_expression.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/unary_expression.py new file mode 100644 index 0000000..0d6fae0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/unary_expression.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class UnaryExpression(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'operator': 'str', + 'argument': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'operator': 'operator', + 'argument': 'argument' + } + + def __init__(self, type=None, operator=None, argument=None): # noqa: E501,D401,D403 + """UnaryExpression - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._operator = None + self._argument = None + self.discriminator = None + + if type is not None: + self.type = type + if operator is not None: + self.operator = operator + if argument is not None: + self.argument = argument + + @property + def type(self): + """Get the type of this UnaryExpression. + + Type of AST node + + :return: The type of this UnaryExpression. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this UnaryExpression. + + Type of AST node + + :param type: The type of this UnaryExpression. + :type: str + """ # noqa: E501 + self._type = type + + @property + def operator(self): + """Get the operator of this UnaryExpression. + + :return: The operator of this UnaryExpression. + :rtype: str + """ # noqa: E501 + return self._operator + + @operator.setter + def operator(self, operator): + """Set the operator of this UnaryExpression. + + :param operator: The operator of this UnaryExpression. + :type: str + """ # noqa: E501 + self._operator = operator + + @property + def argument(self): + """Get the argument of this UnaryExpression. + + :return: The argument of this UnaryExpression. + :rtype: Expression + """ # noqa: E501 + return self._argument + + @argument.setter + def argument(self, argument): + """Set the argument of this UnaryExpression. + + :param argument: The argument of this UnaryExpression. 
+ :type: Expression + """ # noqa: E501 + self._argument = argument + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, UnaryExpression): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/unsigned_integer_literal.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/unsigned_integer_literal.py new file mode 100644 index 0000000..b7692a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/unsigned_integer_literal.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.expression import Expression + + +class UnsignedIntegerLiteral(Expression): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'value': 'str' + } + + attribute_map = { + 'type': 'type', + 'value': 'value' + } + + def __init__(self, type=None, value=None): # noqa: E501,D401,D403 + """UnsignedIntegerLiteral - a model defined in OpenAPI.""" # noqa: E501 + Expression.__init__(self) # noqa: E501 + + self._type = None + self._value = None + self.discriminator = None + + if type is not None: + self.type = type + if value is not None: + self.value = value + + @property + def type(self): + """Get the type of this UnsignedIntegerLiteral. + + Type of AST node + + :return: The type of this UnsignedIntegerLiteral. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this UnsignedIntegerLiteral. + + Type of AST node + + :param type: The type of this UnsignedIntegerLiteral. + :type: str + """ # noqa: E501 + self._type = type + + @property + def value(self): + """Get the value of this UnsignedIntegerLiteral. + + :return: The value of this UnsignedIntegerLiteral. + :rtype: str + """ # noqa: E501 + return self._value + + @value.setter + def value(self, value): + """Set the value of this UnsignedIntegerLiteral. + + :param value: The value of this UnsignedIntegerLiteral. 
+ :type: str + """ # noqa: E501 + self._value = value + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, UnsignedIntegerLiteral): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/user.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user.py new file mode 100644 index 0000000..6b7a28c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user.py @@ -0,0 +1,166 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class User(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'status': 'str' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'status': 'status' + } + + def __init__(self, id=None, name=None, status='active'): # noqa: E501,D401,D403 + """User - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._status = None + self.discriminator = None + + if id is not None: + self.id = id + self.name = name + if status is not None: + self.status = status + + @property + def id(self): + """Get the id of this User. + + The user ID. + + :return: The id of this User. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this User. + + The user ID. + + :param id: The id of this User. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this User. + + The user name. + + :return: The name of this User. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this User. + + The user name. + + :param name: The name of this User. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def status(self): + """Get the status of this User. + + If `inactive`, the user is inactive. Default is `active`. 
+ + :return: The status of this User. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this User. + + If `inactive`, the user is inactive. Default is `active`. + + :param status: The status of this User. + :type: str + """ # noqa: E501 + self._status = status + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, User): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response.py new file mode 100644 index 0000000..a4a163f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response.py @@ -0,0 +1,189 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class UserResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'id': 'str', + 'name': 'str', + 'status': 'str', + 'links': 'UserResponseLinks' + } + + attribute_map = { + 'id': 'id', + 'name': 'name', + 'status': 'status', + 'links': 'links' + } + + def __init__(self, id=None, name=None, status='active', links=None): # noqa: E501,D401,D403 + """UserResponse - a model defined in OpenAPI.""" # noqa: E501 + self._id = None + self._name = None + self._status = None + self._links = None + self.discriminator = None + + if id is not None: + self.id = id + self.name = name + if status is not None: + self.status = status + if links is not None: + self.links = links + + @property + def id(self): + """Get the id of this UserResponse. + + The user ID. + + :return: The id of this UserResponse. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this UserResponse. + + The user ID. + + :param id: The id of this UserResponse. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this UserResponse. + + The user name. + + :return: The name of this UserResponse. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this UserResponse. + + The user name. + + :param name: The name of this UserResponse. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def status(self): + """Get the status of this UserResponse. + + The status of a user. An inactive user can't read or write resources. + + :return: The status of this UserResponse. + :rtype: str + """ # noqa: E501 + return self._status + + @status.setter + def status(self, status): + """Set the status of this UserResponse. + + The status of a user. An inactive user can't read or write resources. + + :param status: The status of this UserResponse. + :type: str + """ # noqa: E501 + self._status = status + + @property + def links(self): + """Get the links of this UserResponse. + + :return: The links of this UserResponse. + :rtype: UserResponseLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this UserResponse. + + :param links: The links of this UserResponse. + :type: UserResponseLinks + """ # noqa: E501 + self._links = links + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, UserResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response_links.py new file mode 100644 index 0000000..bf825cf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/user_response_links.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class UserResponseLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + '_self': 'str' + } + + attribute_map = { + '_self': 'self' + } + + def __init__(self, _self=None): # noqa: E501,D401,D403 + """UserResponseLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self.discriminator = None + + if _self is not None: + self._self = _self + + @property + def _self(self): + """Get the _self of this UserResponseLinks. + + :return: The _self of this UserResponseLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this UserResponseLinks. + + :param _self: The _self of this UserResponseLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, UserResponseLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/users.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/users.py new file mode 100644 index 0000000..067116c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/users.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Users(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ResourceMembersLinks', + 'users': 'list[UserResponse]' + } + + attribute_map = { + 'links': 'links', + 'users': 'users' + } + + def __init__(self, links=None, users=None): # noqa: E501,D401,D403 + """Users - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._users = None + self.discriminator = None + + if links is not None: + self.links = links + if users is not None: + self.users = users + + @property + def links(self): + """Get the links of this Users. + + :return: The links of this Users. + :rtype: ResourceMembersLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Users. + + :param links: The links of this Users. 
+ :type: ResourceMembersLinks + """ # noqa: E501 + self._links = links + + @property + def users(self): + """Get the users of this Users. + + :return: The users of this Users. + :rtype: list[UserResponse] + """ # noqa: E501 + return self._users + + @users.setter + def users(self, users): + """Set the users of this Users. + + :param users: The users of this Users. + :type: list[UserResponse] + """ # noqa: E501 + self._users = users + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Users): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable.py new file mode 100644 index 0000000..4c1b280 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable.py @@ -0,0 +1,317 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Variable(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'links': 'VariableLinks', + 'id': 'str', + 'org_id': 'str', + 'name': 'str', + 'description': 'str', + 'selected': 'list[str]', + 'labels': 'list[Label]', + 'arguments': 'VariableProperties', + 'created_at': 'datetime', + 'updated_at': 'datetime' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'org_id': 'orgID', + 'name': 'name', + 'description': 'description', + 'selected': 'selected', + 'labels': 'labels', + 'arguments': 'arguments', + 'created_at': 'createdAt', + 'updated_at': 'updatedAt' + } + + def __init__(self, links=None, id=None, org_id=None, name=None, description=None, selected=None, labels=None, arguments=None, created_at=None, updated_at=None): # noqa: E501,D401,D403 + """Variable - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._id = None + self._org_id = None + self._name = None + self._description = None + self._selected = None + self._labels = None + self._arguments = None + self._created_at = None + self._updated_at = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + self.org_id = org_id + self.name = name + if description is not None: + self.description = description + if selected is not None: + self.selected = selected + if labels is not None: + self.labels = labels + self.arguments = arguments + if created_at is not None: + self.created_at = created_at + if updated_at is not None: + self.updated_at = updated_at + + @property + def links(self): + """Get the links of this Variable. + + :return: The links of this Variable. + :rtype: VariableLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Variable. + + :param links: The links of this Variable. + :type: VariableLinks + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this Variable. + + :return: The id of this Variable. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this Variable. + + :param id: The id of this Variable. + :type: str + """ # noqa: E501 + self._id = id + + @property + def org_id(self): + """Get the org_id of this Variable. + + :return: The org_id of this Variable. + :rtype: str + """ # noqa: E501 + return self._org_id + + @org_id.setter + def org_id(self, org_id): + """Set the org_id of this Variable. + + :param org_id: The org_id of this Variable. + :type: str + """ # noqa: E501 + if org_id is None: + raise ValueError("Invalid value for `org_id`, must not be `None`") # noqa: E501 + self._org_id = org_id + + @property + def name(self): + """Get the name of this Variable. + + :return: The name of this Variable. + :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this Variable. + + :param name: The name of this Variable. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def description(self): + """Get the description of this Variable. + + :return: The description of this Variable. + :rtype: str + """ # noqa: E501 + return self._description + + @description.setter + def description(self, description): + """Set the description of this Variable. + + :param description: The description of this Variable. + :type: str + """ # noqa: E501 + self._description = description + + @property + def selected(self): + """Get the selected of this Variable. 
+ + :return: The selected of this Variable. + :rtype: list[str] + """ # noqa: E501 + return self._selected + + @selected.setter + def selected(self, selected): + """Set the selected of this Variable. + + :param selected: The selected of this Variable. + :type: list[str] + """ # noqa: E501 + self._selected = selected + + @property + def labels(self): + """Get the labels of this Variable. + + :return: The labels of this Variable. + :rtype: list[Label] + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this Variable. + + :param labels: The labels of this Variable. + :type: list[Label] + """ # noqa: E501 + self._labels = labels + + @property + def arguments(self): + """Get the arguments of this Variable. + + :return: The arguments of this Variable. + :rtype: VariableProperties + """ # noqa: E501 + return self._arguments + + @arguments.setter + def arguments(self, arguments): + """Set the arguments of this Variable. + + :param arguments: The arguments of this Variable. + :type: VariableProperties + """ # noqa: E501 + if arguments is None: + raise ValueError("Invalid value for `arguments`, must not be `None`") # noqa: E501 + self._arguments = arguments + + @property + def created_at(self): + """Get the created_at of this Variable. + + :return: The created_at of this Variable. + :rtype: datetime + """ # noqa: E501 + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Set the created_at of this Variable. + + :param created_at: The created_at of this Variable. + :type: datetime + """ # noqa: E501 + self._created_at = created_at + + @property + def updated_at(self): + """Get the updated_at of this Variable. + + :return: The updated_at of this Variable. + :rtype: datetime + """ # noqa: E501 + return self._updated_at + + @updated_at.setter + def updated_at(self, updated_at): + """Set the updated_at of this Variable. + + :param updated_at: The updated_at of this Variable. + :type: datetime + """ # noqa: E501 + self._updated_at = updated_at + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Variable): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_assignment.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_assignment.py new file mode 100644 index 0000000..c13e549 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_assignment.py @@ -0,0 +1,161 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+ +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.statement import Statement + + +class VariableAssignment(Statement): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'type': 'str', + 'id': 'Identifier', + 'init': 'Expression' + } + + attribute_map = { + 'type': 'type', + 'id': 'id', + 'init': 'init' + } + + def __init__(self, type=None, id=None, init=None): # noqa: E501,D401,D403 + """VariableAssignment - a model defined in OpenAPI.""" # noqa: E501 + Statement.__init__(self) # noqa: E501 + + self._type = None + self._id = None + self._init = None + self.discriminator = None + + if type is not None: + self.type = type + if id is not None: + self.id = id + if init is not None: + self.init = init + + @property + def type(self): + """Get the type of this VariableAssignment. + + Type of AST node + + :return: The type of this VariableAssignment. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this VariableAssignment. + + Type of AST node + + :param type: The type of this VariableAssignment. + :type: str + """ # noqa: E501 + self._type = type + + @property + def id(self): + """Get the id of this VariableAssignment. + + :return: The id of this VariableAssignment. + :rtype: Identifier + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this VariableAssignment. + + :param id: The id of this VariableAssignment. + :type: Identifier + """ # noqa: E501 + self._id = id + + @property + def init(self): + """Get the init of this VariableAssignment. + + :return: The init of this VariableAssignment. + :rtype: Expression + """ # noqa: E501 + return self._init + + @init.setter + def init(self, init): + """Set the init of this VariableAssignment. + + :param init: The init of this VariableAssignment. 
+ :type: Expression + """ # noqa: E501 + self._init = init + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, VariableAssignment): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_links.py new file mode 100644 index 0000000..44a8237 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_links.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class VariableLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str', + 'org': 'str', + 'labels': 'str' + } + + attribute_map = { + '_self': 'self', + 'org': 'org', + 'labels': 'labels' + } + + def __init__(self, _self=None, org=None, labels=None): # noqa: E501,D401,D403 + """VariableLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self._org = None + self._labels = None + self.discriminator = None + + if _self is not None: + self._self = _self + if org is not None: + self.org = org + if labels is not None: + self.labels = labels + + @property + def _self(self): + """Get the _self of this VariableLinks. + + :return: The _self of this VariableLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this VariableLinks. + + :param _self: The _self of this VariableLinks. + :type: str + """ # noqa: E501 + self.__self = _self + + @property + def org(self): + """Get the org of this VariableLinks. + + :return: The org of this VariableLinks. + :rtype: str + """ # noqa: E501 + return self._org + + @org.setter + def org(self, org): + """Set the org of this VariableLinks. + + :param org: The org of this VariableLinks. + :type: str + """ # noqa: E501 + self._org = org + + @property + def labels(self): + """Get the labels of this VariableLinks. + + :return: The labels of this VariableLinks. 
+ :rtype: str + """ # noqa: E501 + return self._labels + + @labels.setter + def labels(self, labels): + """Set the labels of this VariableLinks. + + :param labels: The labels of this VariableLinks. + :type: str + """ # noqa: E501 + self._labels = labels + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, VariableLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_properties.py new file mode 100644 index 0000000..c9d9d15 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variable_properties.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class VariableProperties(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """VariableProperties - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, VariableProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/variables.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variables.py new file mode 100644 index 0000000..50ebbf3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/variables.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Variables(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'variables': 'list[Variable]' + } + + attribute_map = { + 'variables': 'variables' + } + + def __init__(self, variables=None): # noqa: E501,D401,D403 + """Variables - a model defined in OpenAPI.""" # noqa: E501 + self._variables = None + self.discriminator = None + + if variables is not None: + self.variables = variables + + @property + def variables(self): + """Get the variables of this Variables. + + :return: The variables of this Variables. + :rtype: list[Variable] + """ # noqa: E501 + return self._variables + + @variables.setter + def variables(self, variables): + """Set the variables of this Variables. + + :param variables: The variables of this Variables. 
+ :type: list[Variable] + """ # noqa: E501 + self._variables = variables + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Variables): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/view.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view.py new file mode 100644 index 0000000..5671253 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view.py @@ -0,0 +1,178 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class View(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ViewLinks', + 'id': 'str', + 'name': 'str', + 'properties': 'ViewProperties' + } + + attribute_map = { + 'links': 'links', + 'id': 'id', + 'name': 'name', + 'properties': 'properties' + } + + def __init__(self, links=None, id=None, name=None, properties=None): # noqa: E501,D401,D403 + """View - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._id = None + self._name = None + self._properties = None + self.discriminator = None + + if links is not None: + self.links = links + if id is not None: + self.id = id + self.name = name + self.properties = properties + + @property + def links(self): + """Get the links of this View. + + :return: The links of this View. + :rtype: ViewLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this View. + + :param links: The links of this View. + :type: ViewLinks + """ # noqa: E501 + self._links = links + + @property + def id(self): + """Get the id of this View. + + :return: The id of this View. + :rtype: str + """ # noqa: E501 + return self._id + + @id.setter + def id(self, id): + """Set the id of this View. + + :param id: The id of this View. + :type: str + """ # noqa: E501 + self._id = id + + @property + def name(self): + """Get the name of this View. + + :return: The name of this View. 
+ :rtype: str + """ # noqa: E501 + return self._name + + @name.setter + def name(self, name): + """Set the name of this View. + + :param name: The name of this View. + :type: str + """ # noqa: E501 + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + self._name = name + + @property + def properties(self): + """Get the properties of this View. + + :return: The properties of this View. + :rtype: ViewProperties + """ # noqa: E501 + return self._properties + + @properties.setter + def properties(self, properties): + """Set the properties of this View. + + :param properties: The properties of this View. + :type: ViewProperties + """ # noqa: E501 + if properties is None: + raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 + self._properties = properties + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, View): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_links.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_links.py new file mode 100644 index 0000000..8705bc1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_links.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ViewLinks(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + '_self': 'str' + } + + attribute_map = { + '_self': 'self' + } + + def __init__(self, _self=None): # noqa: E501,D401,D403 + """ViewLinks - a model defined in OpenAPI.""" # noqa: E501 + self.__self = None + self.discriminator = None + + if _self is not None: + self._self = _self + + @property + def _self(self): + """Get the _self of this ViewLinks. + + :return: The _self of this ViewLinks. + :rtype: str + """ # noqa: E501 + return self.__self + + @_self.setter + def _self(self, _self): + """Set the _self of this ViewLinks. + + :param _self: The _self of this ViewLinks. 
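View above is the first model in this stretch with required fields: links and id are guarded by `is not None` checks in __init__, but name and properties are assigned unconditionally, and their setters reject None. Constructing a View without them therefore fails immediately, as this sketch shows:

    from influxdb_client.domain.view import View

    try:
        View()  # name defaults to None and is assigned unconditionally
    except ValueError as err:
        print(err)  # Invalid value for `name`, must not be `None`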
+ :type: str + """ # noqa: E501 + self.__self = _self + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ViewLinks): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_properties.py new file mode 100644 index 0000000..e08c354 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/view_properties.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class ViewProperties(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """ViewProperties - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, ViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/views.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/views.py new file mode 100644 index 0000000..a9bbb64 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/views.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class Views(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'links': 'ViewLinks', + 'views': 'list[View]' + } + + attribute_map = { + 'links': 'links', + 'views': 'views' + } + + def __init__(self, links=None, views=None): # noqa: E501,D401,D403 + """Views - a model defined in OpenAPI.""" # noqa: E501 + self._links = None + self._views = None + self.discriminator = None + + if links is not None: + self.links = links + if views is not None: + self.views = views + + @property + def links(self): + """Get the links of this Views. + + :return: The links of this Views. + :rtype: ViewLinks + """ # noqa: E501 + return self._links + + @links.setter + def links(self, links): + """Set the links of this Views. + + :param links: The links of this Views. + :type: ViewLinks + """ # noqa: E501 + self._links = links + + @property + def views(self): + """Get the views of this Views. + + :return: The views of this Views. + :rtype: list[View] + """ # noqa: E501 + return self._views + + @views.setter + def views(self, views): + """Set the views of this Views. + + :param views: The views of this Views. 
+ :type: list[View] + """ # noqa: E501 + self._views = views + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, Views): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/write_precision.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/write_precision.py new file mode 100644 index 0000000..41a0db9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/write_precision.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class WritePrecision(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + MS = "ms" + S = "s" + US = "us" + NS = "ns" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """WritePrecision - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, WritePrecision): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_geom.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_geom.py new file mode 100644 index 0000000..367806b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_geom.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + + +class XYGeom(object): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + LINE = "line" + STEP = "step" + STACKED = "stacked" + BAR = "bar" + MONOTONEX = "monotoneX" + STEPBEFORE = "stepBefore" + STEPAFTER = "stepAfter" + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self): # noqa: E501,D401,D403 + """XYGeom - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, XYGeom): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_view_properties.py b/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_view_properties.py new file mode 100644 index 0000000..817967c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/domain/xy_view_properties.py @@ -0,0 +1,776 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +from influxdb_client.domain.view_properties import ViewProperties + + +class XYViewProperties(ViewProperties): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'adaptive_zoom_hide': 'bool', + 'time_format': 'str', + 'type': 'str', + 'queries': 'list[DashboardQuery]', + 'colors': 'list[DashboardColor]', + 'color_mapping': 'dict(str, str)', + 'shape': 'str', + 'note': 'str', + 'show_note_when_empty': 'bool', + 'axes': 'Axes', + 'static_legend': 'StaticLegend', + 'x_column': 'str', + 'generate_x_axis_ticks': 'list[str]', + 'x_total_ticks': 'int', + 'x_tick_start': 'float', + 'x_tick_step': 'float', + 'y_column': 'str', + 'generate_y_axis_ticks': 'list[str]', + 'y_total_ticks': 'int', + 'y_tick_start': 'float', + 'y_tick_step': 'float', + 'shade_below': 'bool', + 'hover_dimension': 'str', + 'position': 'str', + 'geom': 'XYGeom', + 'legend_colorize_rows': 'bool', + 'legend_hide': 'bool', + 'legend_opacity': 'float', + 'legend_orientation_threshold': 'int' + } + + attribute_map = { + 'adaptive_zoom_hide': 'adaptiveZoomHide', + 'time_format': 'timeFormat', + 'type': 'type', + 'queries': 'queries', + 'colors': 'colors', + 'color_mapping': 'colorMapping', + 'shape': 'shape', + 'note': 'note', + 'show_note_when_empty': 'showNoteWhenEmpty', + 'axes': 'axes', + 'static_legend': 'staticLegend', + 'x_column': 'xColumn', + 'generate_x_axis_ticks': 'generateXAxisTicks', + 'x_total_ticks': 'xTotalTicks', + 'x_tick_start': 'xTickStart', + 'x_tick_step': 'xTickStep', + 'y_column': 'yColumn', + 'generate_y_axis_ticks': 'generateYAxisTicks', + 'y_total_ticks': 'yTotalTicks', + 'y_tick_start': 'yTickStart', + 'y_tick_step': 'yTickStep', + 'shade_below': 'shadeBelow', + 'hover_dimension': 'hoverDimension', + 'position': 'position', + 'geom': 'geom', + 'legend_colorize_rows': 'legendColorizeRows', + 'legend_hide': 'legendHide', + 'legend_opacity': 'legendOpacity', + 'legend_orientation_threshold': 'legendOrientationThreshold' + } + + def __init__(self, adaptive_zoom_hide=None, time_format=None, type=None, queries=None, colors=None, color_mapping=None, shape=None, note=None, show_note_when_empty=None, axes=None, static_legend=None, x_column=None, generate_x_axis_ticks=None, x_total_ticks=None, x_tick_start=None, x_tick_step=None, y_column=None, generate_y_axis_ticks=None, y_total_ticks=None, y_tick_start=None, y_tick_step=None, shade_below=None, hover_dimension=None, position=None, geom=None, legend_colorize_rows=None, legend_hide=None, legend_opacity=None, legend_orientation_threshold=None): # noqa: E501,D401,D403 + """XYViewProperties - a model defined in OpenAPI.""" # noqa: E501 + ViewProperties.__init__(self) # noqa: E501 + + self._adaptive_zoom_hide = None + self._time_format = None + self._type = None + self._queries = None + self._colors = None + self._color_mapping = None + self._shape = None + self._note = None + self._show_note_when_empty = None + self._axes = None + self._static_legend = None + self._x_column = None + self._generate_x_axis_ticks = None + self._x_total_ticks = None + self._x_tick_start = None + self._x_tick_step = None + self._y_column = None + self._generate_y_axis_ticks = None + self._y_total_ticks = None + self._y_tick_start = None + self._y_tick_step = None + self._shade_below = None + self._hover_dimension = None + self._position = None + self._geom = None + self._legend_colorize_rows = None + self._legend_hide = None + self._legend_opacity = None + self._legend_orientation_threshold = None + self.discriminator = None + + if adaptive_zoom_hide is not None: + self.adaptive_zoom_hide = adaptive_zoom_hide + if time_format is not None: + self.time_format = time_format + self.type = type + self.queries = queries + 
self.colors = colors + if color_mapping is not None: + self.color_mapping = color_mapping + self.shape = shape + self.note = note + self.show_note_when_empty = show_note_when_empty + self.axes = axes + if static_legend is not None: + self.static_legend = static_legend + if x_column is not None: + self.x_column = x_column + if generate_x_axis_ticks is not None: + self.generate_x_axis_ticks = generate_x_axis_ticks + if x_total_ticks is not None: + self.x_total_ticks = x_total_ticks + if x_tick_start is not None: + self.x_tick_start = x_tick_start + if x_tick_step is not None: + self.x_tick_step = x_tick_step + if y_column is not None: + self.y_column = y_column + if generate_y_axis_ticks is not None: + self.generate_y_axis_ticks = generate_y_axis_ticks + if y_total_ticks is not None: + self.y_total_ticks = y_total_ticks + if y_tick_start is not None: + self.y_tick_start = y_tick_start + if y_tick_step is not None: + self.y_tick_step = y_tick_step + if shade_below is not None: + self.shade_below = shade_below + if hover_dimension is not None: + self.hover_dimension = hover_dimension + self.position = position + self.geom = geom + if legend_colorize_rows is not None: + self.legend_colorize_rows = legend_colorize_rows + if legend_hide is not None: + self.legend_hide = legend_hide + if legend_opacity is not None: + self.legend_opacity = legend_opacity + if legend_orientation_threshold is not None: + self.legend_orientation_threshold = legend_orientation_threshold + + @property + def adaptive_zoom_hide(self): + """Get the adaptive_zoom_hide of this XYViewProperties. + + :return: The adaptive_zoom_hide of this XYViewProperties. + :rtype: bool + """ # noqa: E501 + return self._adaptive_zoom_hide + + @adaptive_zoom_hide.setter + def adaptive_zoom_hide(self, adaptive_zoom_hide): + """Set the adaptive_zoom_hide of this XYViewProperties. + + :param adaptive_zoom_hide: The adaptive_zoom_hide of this XYViewProperties. + :type: bool + """ # noqa: E501 + self._adaptive_zoom_hide = adaptive_zoom_hide + + @property + def time_format(self): + """Get the time_format of this XYViewProperties. + + :return: The time_format of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._time_format + + @time_format.setter + def time_format(self, time_format): + """Set the time_format of this XYViewProperties. + + :param time_format: The time_format of this XYViewProperties. + :type: str + """ # noqa: E501 + self._time_format = time_format + + @property + def type(self): + """Get the type of this XYViewProperties. + + :return: The type of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._type + + @type.setter + def type(self, type): + """Set the type of this XYViewProperties. + + :param type: The type of this XYViewProperties. + :type: str + """ # noqa: E501 + if type is None: + raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 + self._type = type + + @property + def queries(self): + """Get the queries of this XYViewProperties. + + :return: The queries of this XYViewProperties. + :rtype: list[DashboardQuery] + """ # noqa: E501 + return self._queries + + @queries.setter + def queries(self, queries): + """Set the queries of this XYViewProperties. + + :param queries: The queries of this XYViewProperties. + :type: list[DashboardQuery] + """ # noqa: E501 + if queries is None: + raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 + self._queries = queries + + @property + def colors(self): + """Get the colors of this XYViewProperties. 
+ + Colors define color encoding of data into a visualization + + :return: The colors of this XYViewProperties. + :rtype: list[DashboardColor] + """ # noqa: E501 + return self._colors + + @colors.setter + def colors(self, colors): + """Set the colors of this XYViewProperties. + + Colors define color encoding of data into a visualization + + :param colors: The colors of this XYViewProperties. + :type: list[DashboardColor] + """ # noqa: E501 + if colors is None: + raise ValueError("Invalid value for `colors`, must not be `None`") # noqa: E501 + self._colors = colors + + @property + def color_mapping(self): + """Get the color_mapping of this XYViewProperties. + + A color mapping is an object that maps time series data to a UI color scheme to allow the UI to render graphs consistent colors across reloads. + + :return: The color_mapping of this XYViewProperties. + :rtype: dict(str, str) + """ # noqa: E501 + return self._color_mapping + + @color_mapping.setter + def color_mapping(self, color_mapping): + """Set the color_mapping of this XYViewProperties. + + A color mapping is an object that maps time series data to a UI color scheme to allow the UI to render graphs consistent colors across reloads. + + :param color_mapping: The color_mapping of this XYViewProperties. + :type: dict(str, str) + """ # noqa: E501 + self._color_mapping = color_mapping + + @property + def shape(self): + """Get the shape of this XYViewProperties. + + :return: The shape of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._shape + + @shape.setter + def shape(self, shape): + """Set the shape of this XYViewProperties. + + :param shape: The shape of this XYViewProperties. + :type: str + """ # noqa: E501 + if shape is None: + raise ValueError("Invalid value for `shape`, must not be `None`") # noqa: E501 + self._shape = shape + + @property + def note(self): + """Get the note of this XYViewProperties. + + :return: The note of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._note + + @note.setter + def note(self, note): + """Set the note of this XYViewProperties. + + :param note: The note of this XYViewProperties. + :type: str + """ # noqa: E501 + if note is None: + raise ValueError("Invalid value for `note`, must not be `None`") # noqa: E501 + self._note = note + + @property + def show_note_when_empty(self): + """Get the show_note_when_empty of this XYViewProperties. + + If true, will display note when empty + + :return: The show_note_when_empty of this XYViewProperties. + :rtype: bool + """ # noqa: E501 + return self._show_note_when_empty + + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty): + """Set the show_note_when_empty of this XYViewProperties. + + If true, will display note when empty + + :param show_note_when_empty: The show_note_when_empty of this XYViewProperties. + :type: bool + """ # noqa: E501 + if show_note_when_empty is None: + raise ValueError("Invalid value for `show_note_when_empty`, must not be `None`") # noqa: E501 + self._show_note_when_empty = show_note_when_empty + + @property + def axes(self): + """Get the axes of this XYViewProperties. + + :return: The axes of this XYViewProperties. + :rtype: Axes + """ # noqa: E501 + return self._axes + + @axes.setter + def axes(self, axes): + """Set the axes of this XYViewProperties. + + :param axes: The axes of this XYViewProperties. 
+ :type: Axes + """ # noqa: E501 + if axes is None: + raise ValueError("Invalid value for `axes`, must not be `None`") # noqa: E501 + self._axes = axes + + @property + def static_legend(self): + """Get the static_legend of this XYViewProperties. + + :return: The static_legend of this XYViewProperties. + :rtype: StaticLegend + """ # noqa: E501 + return self._static_legend + + @static_legend.setter + def static_legend(self, static_legend): + """Set the static_legend of this XYViewProperties. + + :param static_legend: The static_legend of this XYViewProperties. + :type: StaticLegend + """ # noqa: E501 + self._static_legend = static_legend + + @property + def x_column(self): + """Get the x_column of this XYViewProperties. + + :return: The x_column of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._x_column + + @x_column.setter + def x_column(self, x_column): + """Set the x_column of this XYViewProperties. + + :param x_column: The x_column of this XYViewProperties. + :type: str + """ # noqa: E501 + self._x_column = x_column + + @property + def generate_x_axis_ticks(self): + """Get the generate_x_axis_ticks of this XYViewProperties. + + :return: The generate_x_axis_ticks of this XYViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_x_axis_ticks + + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks): + """Set the generate_x_axis_ticks of this XYViewProperties. + + :param generate_x_axis_ticks: The generate_x_axis_ticks of this XYViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_x_axis_ticks = generate_x_axis_ticks + + @property + def x_total_ticks(self): + """Get the x_total_ticks of this XYViewProperties. + + :return: The x_total_ticks of this XYViewProperties. + :rtype: int + """ # noqa: E501 + return self._x_total_ticks + + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks): + """Set the x_total_ticks of this XYViewProperties. + + :param x_total_ticks: The x_total_ticks of this XYViewProperties. + :type: int + """ # noqa: E501 + self._x_total_ticks = x_total_ticks + + @property + def x_tick_start(self): + """Get the x_tick_start of this XYViewProperties. + + :return: The x_tick_start of this XYViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_start + + @x_tick_start.setter + def x_tick_start(self, x_tick_start): + """Set the x_tick_start of this XYViewProperties. + + :param x_tick_start: The x_tick_start of this XYViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_start = x_tick_start + + @property + def x_tick_step(self): + """Get the x_tick_step of this XYViewProperties. + + :return: The x_tick_step of this XYViewProperties. + :rtype: float + """ # noqa: E501 + return self._x_tick_step + + @x_tick_step.setter + def x_tick_step(self, x_tick_step): + """Set the x_tick_step of this XYViewProperties. + + :param x_tick_step: The x_tick_step of this XYViewProperties. + :type: float + """ # noqa: E501 + self._x_tick_step = x_tick_step + + @property + def y_column(self): + """Get the y_column of this XYViewProperties. + + :return: The y_column of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._y_column + + @y_column.setter + def y_column(self, y_column): + """Set the y_column of this XYViewProperties. + + :param y_column: The y_column of this XYViewProperties. 
+ :type: str + """ # noqa: E501 + self._y_column = y_column + + @property + def generate_y_axis_ticks(self): + """Get the generate_y_axis_ticks of this XYViewProperties. + + :return: The generate_y_axis_ticks of this XYViewProperties. + :rtype: list[str] + """ # noqa: E501 + return self._generate_y_axis_ticks + + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks): + """Set the generate_y_axis_ticks of this XYViewProperties. + + :param generate_y_axis_ticks: The generate_y_axis_ticks of this XYViewProperties. + :type: list[str] + """ # noqa: E501 + self._generate_y_axis_ticks = generate_y_axis_ticks + + @property + def y_total_ticks(self): + """Get the y_total_ticks of this XYViewProperties. + + :return: The y_total_ticks of this XYViewProperties. + :rtype: int + """ # noqa: E501 + return self._y_total_ticks + + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks): + """Set the y_total_ticks of this XYViewProperties. + + :param y_total_ticks: The y_total_ticks of this XYViewProperties. + :type: int + """ # noqa: E501 + self._y_total_ticks = y_total_ticks + + @property + def y_tick_start(self): + """Get the y_tick_start of this XYViewProperties. + + :return: The y_tick_start of this XYViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_start + + @y_tick_start.setter + def y_tick_start(self, y_tick_start): + """Set the y_tick_start of this XYViewProperties. + + :param y_tick_start: The y_tick_start of this XYViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_start = y_tick_start + + @property + def y_tick_step(self): + """Get the y_tick_step of this XYViewProperties. + + :return: The y_tick_step of this XYViewProperties. + :rtype: float + """ # noqa: E501 + return self._y_tick_step + + @y_tick_step.setter + def y_tick_step(self, y_tick_step): + """Set the y_tick_step of this XYViewProperties. + + :param y_tick_step: The y_tick_step of this XYViewProperties. + :type: float + """ # noqa: E501 + self._y_tick_step = y_tick_step + + @property + def shade_below(self): + """Get the shade_below of this XYViewProperties. + + :return: The shade_below of this XYViewProperties. + :rtype: bool + """ # noqa: E501 + return self._shade_below + + @shade_below.setter + def shade_below(self, shade_below): + """Set the shade_below of this XYViewProperties. + + :param shade_below: The shade_below of this XYViewProperties. + :type: bool + """ # noqa: E501 + self._shade_below = shade_below + + @property + def hover_dimension(self): + """Get the hover_dimension of this XYViewProperties. + + :return: The hover_dimension of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._hover_dimension + + @hover_dimension.setter + def hover_dimension(self, hover_dimension): + """Set the hover_dimension of this XYViewProperties. + + :param hover_dimension: The hover_dimension of this XYViewProperties. + :type: str + """ # noqa: E501 + self._hover_dimension = hover_dimension + + @property + def position(self): + """Get the position of this XYViewProperties. + + :return: The position of this XYViewProperties. + :rtype: str + """ # noqa: E501 + return self._position + + @position.setter + def position(self, position): + """Set the position of this XYViewProperties. + + :param position: The position of this XYViewProperties. 
+ :type: str + """ # noqa: E501 + if position is None: + raise ValueError("Invalid value for `position`, must not be `None`") # noqa: E501 + self._position = position + + @property + def geom(self): + """Get the geom of this XYViewProperties. + + :return: The geom of this XYViewProperties. + :rtype: XYGeom + """ # noqa: E501 + return self._geom + + @geom.setter + def geom(self, geom): + """Set the geom of this XYViewProperties. + + :param geom: The geom of this XYViewProperties. + :type: XYGeom + """ # noqa: E501 + if geom is None: + raise ValueError("Invalid value for `geom`, must not be `None`") # noqa: E501 + self._geom = geom + + @property + def legend_colorize_rows(self): + """Get the legend_colorize_rows of this XYViewProperties. + + :return: The legend_colorize_rows of this XYViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_colorize_rows + + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows): + """Set the legend_colorize_rows of this XYViewProperties. + + :param legend_colorize_rows: The legend_colorize_rows of this XYViewProperties. + :type: bool + """ # noqa: E501 + self._legend_colorize_rows = legend_colorize_rows + + @property + def legend_hide(self): + """Get the legend_hide of this XYViewProperties. + + :return: The legend_hide of this XYViewProperties. + :rtype: bool + """ # noqa: E501 + return self._legend_hide + + @legend_hide.setter + def legend_hide(self, legend_hide): + """Set the legend_hide of this XYViewProperties. + + :param legend_hide: The legend_hide of this XYViewProperties. + :type: bool + """ # noqa: E501 + self._legend_hide = legend_hide + + @property + def legend_opacity(self): + """Get the legend_opacity of this XYViewProperties. + + :return: The legend_opacity of this XYViewProperties. + :rtype: float + """ # noqa: E501 + return self._legend_opacity + + @legend_opacity.setter + def legend_opacity(self, legend_opacity): + """Set the legend_opacity of this XYViewProperties. + + :param legend_opacity: The legend_opacity of this XYViewProperties. + :type: float + """ # noqa: E501 + self._legend_opacity = legend_opacity + + @property + def legend_orientation_threshold(self): + """Get the legend_orientation_threshold of this XYViewProperties. + + :return: The legend_orientation_threshold of this XYViewProperties. + :rtype: int + """ # noqa: E501 + return self._legend_orientation_threshold + + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold): + """Set the legend_orientation_threshold of this XYViewProperties. + + :param legend_orientation_threshold: The legend_orientation_threshold of this XYViewProperties. 
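XYViewProperties mirrors View's validation split at larger scale: type, queries, colors, shape, note, show_note_when_empty, axes, position, and geom are assigned unconditionally in __init__ and their setters raise on None, while everything else is optional. Note also that attribute_map translates the snake_case Python names to the camelCase JSON keys (show_note_when_empty ↔ showNoteWhenEmpty); the serializer that consumes that map lives elsewhere in the client. The fail-fast behavior in a sketch:

    from influxdb_client.domain.xy_view_properties import XYViewProperties

    try:
        XYViewProperties()  # all nine required fields default to None
    except ValueError as err:
        print(err)  # Invalid value for `type`, must not be `None` — the first required setter hit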
+ :type: int + """ # noqa: E501 + self._legend_orientation_threshold = legend_orientation_threshold + + def to_dict(self): + """Return the model properties as a dict.""" + result = {} + + for attr, _ in self.openapi_types.items(): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Return the string representation of the model.""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`.""" + return self.to_str() + + def __eq__(self, other): + """Return true if both objects are equal.""" + if not isinstance(other, XYViewProperties): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Return true if both objects are not equal.""" + return not self == other diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/extras.py b/myenv/lib/python3.12/site-packages/influxdb_client/extras.py new file mode 100644 index 0000000..63ceb3b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/extras.py @@ -0,0 +1,13 @@ +"""Extras to selectively import Pandas or NumPy.""" + +try: + import pandas as pd +except ModuleNotFoundError as err: + raise ImportError(f"`query_data_frame` requires Pandas which couldn't be imported due: {err}") + +try: + import numpy as np +except ModuleNotFoundError as err: + raise ImportError(f"`data_frame` requires numpy which couldn't be imported due: {err}") + +__all__ = ['pd', 'np'] diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/py.typed b/myenv/lib/python3.12/site-packages/influxdb_client/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/rest.py b/myenv/lib/python3.12/site-packages/influxdb_client/rest.py new file mode 100644 index 0000000..cd4dbff --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/rest.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import logging +from typing import Dict +from urllib3 import HTTPResponse +from influxdb_client.client.exceptions import InfluxDBError +from influxdb_client.configuration import Configuration + +_UTF_8_encoding = 'utf-8' + + +class ApiException(InfluxDBError): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + Do not edit the class manually. 
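extras.py above is a small optional-dependency gate: importing it either yields working pd/np aliases or re-raises the missing module as an ImportError whose message names the client feature that needs it (query_data_frame). A minimal consumer sketch:

    try:
        from influxdb_client.extras import pd, np
    except ImportError as err:
        print(f"DataFrame support unavailable: {err}")
    else:
        print(pd.DataFrame({"n": np.arange(3)}))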
+ """ + + def __init__(self, status=None, reason=None, http_resp=None): + """Initialize with HTTP response.""" + super().__init__(response=http_resp) + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + if isinstance(http_resp, HTTPResponse): # response is HTTPResponse + self.headers = http_resp.headers + else: # response is RESTResponse + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Get custom error messages for exception.""" + error_message = "({0})\n" \ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +class _BaseRESTClient(object): + logger = logging.getLogger('influxdb_client.client.http') + + @staticmethod + def log_request(method: str, url: str): + _BaseRESTClient.logger.debug(f">>> Request: '{method} {url}'") + + @staticmethod + def log_response(status: str): + _BaseRESTClient.logger.debug(f"<<< Response: {status}") + + @staticmethod + def log_body(body: object, prefix: str): + _BaseRESTClient.logger.debug(f"{prefix} Body: {body}") + + @staticmethod + def log_headers(headers: Dict[str, str], prefix: str): + for key, v in headers.items(): + value = v + if 'authorization' == key.lower(): + value = '***' + _BaseRESTClient.logger.debug(f"{prefix} {key}: {value}") + + +def _requires_create_user_session(configuration: Configuration, cookie: str, resource_path: str): + _unauthorized = ['/api/v2/signin', '/api/v2/signout'] + return configuration.username and configuration.password and not cookie and resource_path not in _unauthorized + + +def _requires_expire_user_session(configuration: Configuration, cookie: str): + return configuration.username and configuration.password and cookie diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__init__.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/__init__.py new file mode 100644 index 0000000..0d21a43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/__init__.py @@ -0,0 +1,56 @@ +# flake8: noqa + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import apis into api package +from influxdb_client.service.authorizations_service import AuthorizationsService +from influxdb_client.service.backup_service import BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService +from influxdb_client.service.cells_service import CellsService +from influxdb_client.service.checks_service import ChecksService +from influxdb_client.service.config_service import ConfigService +from influxdb_client.service.dbr_ps_service import DBRPsService +from influxdb_client.service.dashboards_service import DashboardsService +from influxdb_client.service.delete_service import DeleteService +from influxdb_client.service.health_service import HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService +from influxdb_client.service.ping_service import PingService +from influxdb_client.service.query_service import QueryService +from influxdb_client.service.ready_service import ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService +from influxdb_client.service.resources_service import ResourcesService +from influxdb_client.service.restore_service import RestoreService +from influxdb_client.service.routes_service import RoutesService +from influxdb_client.service.rules_service import RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService +from influxdb_client.service.setup_service import SetupService +from influxdb_client.service.signin_service import SigninService +from influxdb_client.service.signout_service import SignoutService +from influxdb_client.service.sources_service import SourcesService +from influxdb_client.service.tasks_service import TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService +from influxdb_client.service.templates_service import TemplatesService +from influxdb_client.service.users_service import UsersService +from influxdb_client.service.variables_service import VariablesService +from influxdb_client.service.views_service import ViewsService +from influxdb_client.service.write_service import WriteService diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3f84861 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/_base_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/_base_service.cpython-312.pyc new file mode 100644 index 0000000..0d0e23e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/_base_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/authorizations_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/authorizations_service.cpython-312.pyc new file mode 100644 index 0000000..1fe66f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/authorizations_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/backup_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/backup_service.cpython-312.pyc new file mode 100644 index 0000000..64593f4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/backup_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/bucket_schemas_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/bucket_schemas_service.cpython-312.pyc new file mode 100644 index 0000000..be94da6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/bucket_schemas_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/buckets_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/buckets_service.cpython-312.pyc new file mode 100644 index 0000000..f8a21f2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/buckets_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/cells_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/cells_service.cpython-312.pyc new file mode 100644 index 0000000..85a3b16 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/cells_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/checks_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/checks_service.cpython-312.pyc new file mode 100644 index 0000000..99a67f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/checks_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/config_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/config_service.cpython-312.pyc new file mode 100644 index 0000000..62ee3fa Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/config_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dashboards_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dashboards_service.cpython-312.pyc new file mode 100644 index 0000000..17eaffd Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dashboards_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dbr_ps_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dbr_ps_service.cpython-312.pyc new file mode 100644 index 0000000..0f31167 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/dbr_ps_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/delete_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/delete_service.cpython-312.pyc new file mode 100644 index 0000000..cad4c94 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/delete_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/health_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/health_service.cpython-312.pyc new file mode 100644 index 0000000..f08abf4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/health_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/invokable_scripts_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/invokable_scripts_service.cpython-312.pyc new file mode 100644 index 0000000..43b8ade Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/invokable_scripts_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/labels_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/labels_service.cpython-312.pyc new file mode 100644 index 0000000..72e9dcb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/labels_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/legacy_authorizations_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/legacy_authorizations_service.cpython-312.pyc new file mode 100644 index 0000000..373a20c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/legacy_authorizations_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/metrics_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/metrics_service.cpython-312.pyc new file mode 100644 index 0000000..d37bccf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/metrics_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_endpoints_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_endpoints_service.cpython-312.pyc new file mode 100644 index 0000000..1acd855 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_endpoints_service.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_rules_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_rules_service.cpython-312.pyc new file mode 100644 index 0000000..674f0f4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/notification_rules_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/organizations_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/organizations_service.cpython-312.pyc new file mode 100644 index 0000000..1614a88 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/organizations_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ping_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ping_service.cpython-312.pyc new file mode 100644 index 0000000..62ea16b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ping_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/query_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/query_service.cpython-312.pyc new file mode 100644 index 0000000..27f8e38 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/query_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ready_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ready_service.cpython-312.pyc new file mode 100644 index 0000000..72a48c3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/ready_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/remote_connections_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/remote_connections_service.cpython-312.pyc new file mode 100644 index 0000000..39a5715 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/remote_connections_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/replications_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/replications_service.cpython-312.pyc new file mode 100644 index 0000000..6f6d68a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/replications_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/resources_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/resources_service.cpython-312.pyc new file mode 100644 index 0000000..88bca2d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/resources_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/restore_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/restore_service.cpython-312.pyc new file mode 100644 index 
0000000..64bca2b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/restore_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/routes_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/routes_service.cpython-312.pyc new file mode 100644 index 0000000..9338340 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/routes_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/rules_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/rules_service.cpython-312.pyc new file mode 100644 index 0000000..827b814 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/rules_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/scraper_targets_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/scraper_targets_service.cpython-312.pyc new file mode 100644 index 0000000..8fa6439 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/scraper_targets_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/secrets_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/secrets_service.cpython-312.pyc new file mode 100644 index 0000000..b72bd89 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/secrets_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/setup_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/setup_service.cpython-312.pyc new file mode 100644 index 0000000..fb518b0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/setup_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signin_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signin_service.cpython-312.pyc new file mode 100644 index 0000000..4afbb60 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signin_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signout_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signout_service.cpython-312.pyc new file mode 100644 index 0000000..06725ef Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/signout_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/sources_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/sources_service.cpython-312.pyc new file mode 100644 index 0000000..d19c8be Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/sources_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/tasks_service.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/tasks_service.cpython-312.pyc new file mode 100644 index 0000000..8e5555a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/tasks_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegraf_plugins_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegraf_plugins_service.cpython-312.pyc new file mode 100644 index 0000000..9b820d1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegraf_plugins_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegrafs_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegrafs_service.cpython-312.pyc new file mode 100644 index 0000000..e9ccf6b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/telegrafs_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/templates_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/templates_service.cpython-312.pyc new file mode 100644 index 0000000..2753687 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/templates_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/users_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/users_service.cpython-312.pyc new file mode 100644 index 0000000..60d1ac2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/users_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/variables_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/variables_service.cpython-312.pyc new file mode 100644 index 0000000..bac03a2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/variables_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/views_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/views_service.cpython-312.pyc new file mode 100644 index 0000000..ba21060 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/views_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/write_service.cpython-312.pyc b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/write_service.cpython-312.pyc new file mode 100644 index 0000000..77051f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/influxdb_client/service/__pycache__/write_service.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/_base_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/_base_service.py new file mode 100644 index 0000000..d3e8f99 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/_base_service.py @@ -0,0 +1,67 @@ + + +# noinspection PyMethodMayBeStatic +class _BaseService(object): + + def __init__(self, 
api_client=None): + """Init common services operation.""" + if api_client is None: + raise ValueError("Invalid value for `api_client`, must be defined.") + self.api_client = api_client + self._build_type = None + + def _check_operation_params(self, operation_id, supported_params, local_params): + supported_params.append('async_req') + supported_params.append('_return_http_data_only') + supported_params.append('_preload_content') + supported_params.append('_request_timeout') + supported_params.append('urlopen_kw') + for key, val in local_params['kwargs'].items(): + if key not in supported_params: + raise TypeError( + f"Got an unexpected keyword argument '{key}'" + f" to method {operation_id}" + ) + local_params[key] = val + del local_params['kwargs'] + + def _is_cloud_instance(self) -> bool: + if not self._build_type: + self._build_type = self.build_type() + return 'cloud' in self._build_type.lower() + + async def _is_cloud_instance_async(self) -> bool: + if not self._build_type: + self._build_type = await self.build_type_async() + return 'cloud' in self._build_type.lower() + + def build_type(self) -> str: + """ + Return the build type of the connected InfluxDB Server. + + :return: The type of InfluxDB build. + """ + from influxdb_client import PingService + ping_service = PingService(self.api_client) + + response = ping_service.get_ping_with_http_info(_return_http_data_only=False) + return self.response_header(response, header_name='X-Influxdb-Build') + + async def build_type_async(self) -> str: + """ + Return the build type of the connected InfluxDB Server. + + :return: The type of InfluxDB build. + """ + from influxdb_client import PingService + ping_service = PingService(self.api_client) + + response = await ping_service.get_ping_async(_return_http_data_only=False) + return self.response_header(response, header_name='X-Influxdb-Build') + + def response_header(self, response, header_name='X-Influxdb-Version') -> str: + if response is not None and len(response) >= 3: + if header_name in response[2]: + return response[2][header_name] + + return "unknown" diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/authorizations_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/authorizations_service.py new file mode 100644 index 0000000..100160e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/authorizations_service.py @@ -0,0 +1,658 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class AuthorizationsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """AuthorizationsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_authorizations_id(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete an authorization. + + Deletes an authorization. Use the endpoint to delete an API token. If you want to disable an API token instead of delete it, [update the authorization's status to `inactive`](#operation/PatchAuthorizationsID). 
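
The `_BaseService` helpers above resolve the server's build type by calling the `/ping` endpoint and reading the `X-Influxdb-Build` response header out of position 2 of the `(data, status, headers)` tuple. As a minimal sketch of the same check done by hand, assuming the standard `InfluxDBClient` entry point and a reachable server; the URL, token, and org below are placeholders:

```python
from influxdb_client import InfluxDBClient, PingService

# Placeholders: point these at a real InfluxDB instance.
with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    ping = PingService(client.api_client)
    # _return_http_data_only=False yields a (data, status, headers) tuple,
    # which is what _BaseService.response_header indexes into as response[2].
    response = ping.get_ping_with_http_info(_return_http_data_only=False)
    build = response[2].get("X-Influxdb-Build", "unknown")
    print(f"build type: {build}, cloud: {'cloud' in build.lower()}")
```
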
+ This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_authorizations_id(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + return data + + def delete_authorizations_id_with_http_info(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete an authorization. + + Deletes an authorization. Use the endpoint to delete an API token. If you want to disable an API token instead of delete it, [update the authorization's status to `inactive`](#operation/PatchAuthorizationsID). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_authorizations_id_with_http_info(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_authorizations_id_prepare(auth_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_authorizations_id_async(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete an authorization. + + Deletes an authorization. Use the endpoint to delete an API token. If you want to disable an API token instead of delete it, [update the authorization's status to `inactive`](#operation/PatchAuthorizationsID). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
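
For reference, a minimal sketch of deleting an API token through this generated service class, assuming a client built with the standard `InfluxDBClient` entry point; the URL, token, org, and authorization ID are placeholders:

```python
from influxdb_client import InfluxDBClient
from influxdb_client.service.authorizations_service import AuthorizationsService

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    service = AuthorizationsService(client.api_client)
    # DELETE /api/v2/authorizations/{authID}; returns None on success.
    service.delete_authorizations_id("0123456789abcdef")
```
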
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_authorizations_id_prepare(auth_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_authorizations_id_prepare(self, auth_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `delete_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_authorizations(self, **kwargs): # noqa: E501,D401,D403 + """List authorizations. + + Lists authorizations. To limit which authorizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all authorizations. #### InfluxDB Cloud - InfluxDB Cloud doesn't expose [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in `GET /api/v2/authorizations` responses; returns `token: redacted` for all authorizations. #### Required permissions To retrieve an authorization, the request must use an API token that has the following permissions: - `read-authorizations` - `read-user` for the user that the authorization is scoped to #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_authorizations(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: A user ID. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str user: A user name. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str org_id: An organization ID. Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. 
Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str token: An API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) value. Specifies an authorization by its `token` property value and returns the authorization. #### InfluxDB OSS - Doesn't support this parameter. InfluxDB OSS ignores the `token=` parameter, applies other parameters, and then returns the result. #### Limitations - The parameter is non-repeatable. If you specify more than one, only the first one is used. If a resource with the specified property value doesn't exist, then the response body contains an empty list. + :return: Authorizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_authorizations_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_authorizations_with_http_info(**kwargs) # noqa: E501 + return data + + def get_authorizations_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List authorizations. + + Lists authorizations. To limit which authorizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all authorizations. #### InfluxDB Cloud - InfluxDB Cloud doesn't expose [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in `GET /api/v2/authorizations` responses; returns `token: redacted` for all authorizations. #### Required permissions To retrieve an authorization, the request must use an API token that has the following permissions: - `read-authorizations` - `read-user` for the user that the authorization is scoped to #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_authorizations_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: A user ID. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str user: A user name. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str org_id: An organization ID. Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str token: An API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) value. Specifies an authorization by its `token` property value and returns the authorization. #### InfluxDB OSS - Doesn't support this parameter. InfluxDB OSS ignores the `token=` parameter, applies other parameters, and then returns the result. #### Limitations - The parameter is non-repeatable. If you specify more than one, only the first one is used. If a resource with the specified property value doesn't exist, then the response body contains an empty list. 
+ :return: Authorizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_authorizations_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/authorizations', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_authorizations_async(self, **kwargs): # noqa: E501,D401,D403 + """List authorizations. + + Lists authorizations. To limit which authorizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all authorizations. #### InfluxDB Cloud - InfluxDB Cloud doesn't expose [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in `GET /api/v2/authorizations` responses; returns `token: redacted` for all authorizations. #### Required permissions To retrieve an authorization, the request must use an API token that has the following permissions: - `read-authorizations` - `read-user` for the user that the authorization is scoped to #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: A user ID. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str user: A user name. Only returns authorizations scoped to the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str org_id: An organization ID. Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. Only returns authorizations that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str token: An API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) value. Specifies an authorization by its `token` property value and returns the authorization. #### InfluxDB OSS - Doesn't support this parameter. InfluxDB OSS ignores the `token=` parameter, applies other parameters, and then returns the result. #### Limitations - The parameter is non-repeatable. If you specify more than one, only the first one is used. If a resource with the specified property value doesn't exist, then the response body contains an empty list. + :return: Authorizations + If the method is called asynchronously, + returns the request thread. 
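
A short sketch of listing authorizations with one of the documented filters, under the same placeholder URL, token, and org as above. With no filters the server returns every authorization the token can read:

```python
from influxdb_client import InfluxDBClient
from influxdb_client.service.authorizations_service import AuthorizationsService

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    service = AuthorizationsService(client.api_client)
    # user, user_id, org, org_id, and (Cloud) token are the supported filters.
    auths = service.get_authorizations(org="my-org")
    for auth in auths.authorizations:
        print(auth.id, auth.status, auth.description)
```
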
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_authorizations_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/authorizations', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_authorizations_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'user_id', 'user', 'org_id', 'org', 'token'] # noqa: E501 + self._check_operation_params('get_authorizations', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'user_id' in local_var_params: + query_params.append(('userID', local_var_params['user_id'])) # noqa: E501 + if 'user' in local_var_params: + query_params.append(('user', local_var_params['user'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'token' in local_var_params: + query_params.append(('token', local_var_params['token'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_authorizations_id(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an authorization. + + Retrieves an authorization. Use this endpoint to retrieve information about an API token, including the token's permissions and the user that the token is scoped to. #### InfluxDB OSS - InfluxDB OSS returns [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in authorizations. - If the request uses an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_, InfluxDB OSS returns authorizations for all organizations in the instance. #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_authorizations_id(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + else: + (data) = self.get_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + return data + + def get_authorizations_id_with_http_info(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an authorization. + + Retrieves an authorization. Use this endpoint to retrieve information about an API token, including the token's permissions and the user that the token is scoped to. #### InfluxDB OSS - InfluxDB OSS returns [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in authorizations. - If the request uses an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_, InfluxDB OSS returns authorizations for all organizations in the instance. #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_authorizations_id_with_http_info(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_authorizations_id_prepare(auth_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_authorizations_id_async(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an authorization. + + Retrieves an authorization. Use this endpoint to retrieve information about an API token, including the token's permissions and the user that the token is scoped to. #### InfluxDB OSS - InfluxDB OSS returns [API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token) values in authorizations. - If the request uses an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_, InfluxDB OSS returns authorizations for all organizations in the instance. #### Related guides - [View tokens](https://docs.influxdata.com/influxdb/latest/security/tokens/view-tokens/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_authorizations_id_prepare(auth_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_authorizations_id_prepare(self, auth_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `get_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_authorizations_id(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update an API token to be active or inactive. + + Updates an authorization. Use this endpoint to set an API token's status to be _active_ or _inactive_. InfluxDB rejects requests that use inactive API tokens. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_authorizations_id(auth_id, authorization_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: In the request body, provide the authorization properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_authorizations_id_with_http_info(auth_id, authorization_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_authorizations_id_with_http_info(auth_id, authorization_update_request, **kwargs) # noqa: E501 + return data + + def patch_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update an API token to be active or inactive. + + Updates an authorization. Use this endpoint to set an API token's status to be _active_ or _inactive_. InfluxDB rejects requests that use inactive API tokens. + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_authorizations_id_with_http_info(auth_id, authorization_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: In the request body, provide the authorization properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_authorizations_id_prepare(auth_id, authorization_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update an API token to be active or inactive. + + Updates an authorization. Use this endpoint to set an API token's status to be _active_ or _inactive_. InfluxDB rejects requests that use inactive API tokens. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: An authorization ID. Specifies the authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: In the request body, provide the authorization properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
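
A sketch of deactivating a token through this PATCH endpoint. The documented body type is `AuthorizationUpdateRequest`; a plain dict with the same fields is used here on the assumption that the generated serializer passes it through unchanged, and the ID is again a placeholder:

```python
from influxdb_client import InfluxDBClient
from influxdb_client.service.authorizations_service import AuthorizationsService

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    service = AuthorizationsService(client.api_client)
    # Deactivate instead of delete: requests using an inactive token are
    # rejected until the status is set back to "active".
    updated = service.patch_authorizations_id(
        "0123456789abcdef",
        {"status": "inactive", "description": "rotated out"},
    )
    print(updated.status)
```
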
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_authorizations_id_prepare(auth_id, authorization_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/authorizations/{authID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_authorizations_id_prepare(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'authorization_update_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `patch_authorizations_id`") # noqa: E501 + # verify the required parameter 'authorization_update_request' is set + if ('authorization_update_request' not in local_var_params or + local_var_params['authorization_update_request'] is None): + raise ValueError("Missing the required parameter `authorization_update_request` when calling `patch_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'authorization_update_request' in local_var_params: + body_params = local_var_params['authorization_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_authorizations(self, authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create an authorization. + + Creates an authorization and returns the authorization with the generated API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token). Use this endpoint to create an authorization, which generates an API token with permissions to `read` or `write` to a specific resource or `type` of resource. The API token is the authorization's `token` property value. To follow best practices for secure API token generation and retrieval, InfluxDB enforces access restrictions on API tokens. - InfluxDB allows access to the API token value immediately after the authorization is created. - You can’t change access (read/write) permissions for an API token after it’s created. - Tokens stop working when the user who created the token is deleted. We recommend the following for managing your tokens: - Create a generic user to create and manage tokens for writing data. 
- Store your tokens in a secure password vault for future access. #### Required permissions - `write-authorizations` - `write-user` for the user that the authorization is scoped to #### Related guides - [Create a token](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_authorizations(authorization_post_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param AuthorizationPostRequest authorization_post_request: The authorization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_authorizations_with_http_info(authorization_post_request, **kwargs) # noqa: E501 + else: + (data) = self.post_authorizations_with_http_info(authorization_post_request, **kwargs) # noqa: E501 + return data + + def post_authorizations_with_http_info(self, authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create an authorization. + + Creates an authorization and returns the authorization with the generated API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token). Use this endpoint to create an authorization, which generates an API token with permissions to `read` or `write` to a specific resource or `type` of resource. The API token is the authorization's `token` property value. To follow best practices for secure API token generation and retrieval, InfluxDB enforces access restrictions on API tokens. - InfluxDB allows access to the API token value immediately after the authorization is created. - You can’t change access (read/write) permissions for an API token after it’s created. - Tokens stop working when the user who created the token is deleted. We recommend the following for managing your tokens: - Create a generic user to create and manage tokens for writing data. - Store your tokens in a secure password vault for future access. #### Required permissions - `write-authorizations` - `write-user` for the user that the authorization is scoped to #### Related guides - [Create a token](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_authorizations_with_http_info(authorization_post_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param AuthorizationPostRequest authorization_post_request: The authorization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_authorizations_prepare(authorization_post_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/authorizations', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_authorizations_async(self, authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create an authorization. + + Creates an authorization and returns the authorization with the generated API [token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token). Use this endpoint to create an authorization, which generates an API token with permissions to `read` or `write` to a specific resource or `type` of resource. The API token is the authorization's `token` property value. To follow best practices for secure API token generation and retrieval, InfluxDB enforces access restrictions on API tokens. - InfluxDB allows access to the API token value immediately after the authorization is created. - You can’t change access (read/write) permissions for an API token after it’s created. - Tokens stop working when the user who created the token is deleted. We recommend the following for managing your tokens: - Create a generic user to create and manage tokens for writing data. - Store your tokens in a secure password vault for future access. #### Required permissions - `write-authorizations` - `write-user` for the user that the authorization is scoped to #### Related guides - [Create a token](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param AuthorizationPostRequest authorization_post_request: The authorization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_authorizations_prepare(authorization_post_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/authorizations', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_authorizations_prepare(self, authorization_post_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['authorization_post_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_authorizations', all_params, local_var_params) + # verify the required parameter 'authorization_post_request' is set + if ('authorization_post_request' not in local_var_params or + local_var_params['authorization_post_request'] is None): + raise ValueError("Missing the required parameter `authorization_post_request` when calling `post_authorizations`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'authorization_post_request' in local_var_params: + body_params = local_var_params['authorization_post_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/backup_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/backup_service.py new file mode 100644 index 0000000..0bc520c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/backup_service.py @@ -0,0 +1,378 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class BackupService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """BackupService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_backup_kv(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of metadata stored in the server's embedded KV store. Don't use with InfluxDB versions greater than InfluxDB 2.1.x.. + + Retrieves a snapshot of metadata stored in the server's embedded KV store. 
InfluxDB versions greater than 2.1.x don't include metadata stored in embedded SQL; avoid using this endpoint with versions greater than 2.1.x. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_kv(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_backup_kv_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_backup_kv_with_http_info(**kwargs) # noqa: E501 + return data + + def get_backup_kv_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of metadata stored in the server's embedded KV store. Don't use with InfluxDB versions greater than InfluxDB 2.1.x.. + + Retrieves a snapshot of metadata stored in the server's embedded KV store. InfluxDB versions greater than 2.1.x don't include metadata stored in embedded SQL; avoid using this endpoint with versions greater than 2.1.x. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_kv_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_kv_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/backup/kv', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_backup_kv_async(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of metadata stored in the server's embedded KV store. Don't use with InfluxDB versions greater than InfluxDB 2.1.x.. + + Retrieves a snapshot of metadata stored in the server's embedded KV store. InfluxDB versions greater than 2.1.x don't include metadata stored in embedded SQL; avoid using this endpoint with versions greater than 2.1.x. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: file + If the method is called asynchronously, + returns the request thread. 
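
A sketch of downloading the KV snapshot to disk, with the usual placeholder URL, token, and org. Passing `_preload_content=False` is assumed to hand back the raw HTTP response so the snapshot bytes can be written straight out instead of being deserialized:

```python
from influxdb_client import InfluxDBClient
from influxdb_client.service.backup_service import BackupService

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    service = BackupService(client.api_client)
    raw = service.get_backup_kv(_preload_content=False)
    with open("bolt.backup", "wb") as f:
        f.write(raw.data)
```
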
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_kv_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/backup/kv', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_backup_kv_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_backup_kv', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/octet-stream', 'application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_backup_metadata(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all metadata in the server. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_metadata(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. + :return: MetadataBackup + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_backup_metadata_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_backup_metadata_with_http_info(**kwargs) # noqa: E501 + return data + + def get_backup_metadata_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all metadata in the server. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_metadata_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. + :return: MetadataBackup + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_metadata_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/backup/metadata', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MetadataBackup', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_backup_metadata_async(self, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all metadata in the server. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. + :return: MetadataBackup + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_metadata_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/backup/metadata', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MetadataBackup', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_backup_metadata_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'accept_encoding'] # noqa: E501 + self._check_operation_params('get_backup_metadata', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'accept_encoding' in local_var_params: + header_params['Accept-Encoding'] = local_var_params['accept_encoding'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['multipart/mixed', 'application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_backup_shard_id(self, shard_id, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all TSM data in a shard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_shard_id(shard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int shard_id: The shard ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. 
+ :param datetime since: The earliest time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) to include in the snapshot. + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_backup_shard_id_with_http_info(shard_id, **kwargs) # noqa: E501 + else: + (data) = self.get_backup_shard_id_with_http_info(shard_id, **kwargs) # noqa: E501 + return data + + def get_backup_shard_id_with_http_info(self, shard_id, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all TSM data in a shard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_backup_shard_id_with_http_info(shard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int shard_id: The shard ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. + :param datetime since: The earliest time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) to include in the snapshot. + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_shard_id_prepare(shard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/backup/shards/{shardID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_backup_shard_id_async(self, shard_id, **kwargs): # noqa: E501,D401,D403 + """Download snapshot of all TSM data in a shard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param int shard_id: The shard ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: Indicates the content encoding (usually a compression algorithm) that the client can understand. + :param datetime since: The earliest time [RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp) to include in the snapshot. + :return: file + If the method is called asynchronously, + returns the request thread. 
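+
+ Example — a minimal sketch of restricting the snapshot with ``since``,
+ run from inside a coroutine; assumes a ``BackupService`` instance named
+ ``backup_service`` built from an async client, and uses a placeholder
+ shard ID:
+
+ >>> from datetime import datetime, timezone
+ >>> # Only include TSM data written at or after the given RFC3339 time.
+ >>> shard_snapshot = await backup_service.get_backup_shard_id_async(
+ ...     shard_id=1,
+ ...     since=datetime(2023, 1, 1, tzinfo=timezone.utc))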
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_backup_shard_id_prepare(shard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/backup/shards/{shardID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_backup_shard_id_prepare(self, shard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['shard_id', 'zap_trace_span', 'accept_encoding', 'since'] # noqa: E501 + self._check_operation_params('get_backup_shard_id', all_params, local_var_params) + # verify the required parameter 'shard_id' is set + if ('shard_id' not in local_var_params or + local_var_params['shard_id'] is None): + raise ValueError("Missing the required parameter `shard_id` when calling `get_backup_shard_id`") # noqa: E501 + + path_params = {} + if 'shard_id' in local_var_params: + path_params['shardID'] = local_var_params['shard_id'] # noqa: E501 + + query_params = [] + if 'since' in local_var_params: + query_params.append(('since', local_var_params['since'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'accept_encoding' in local_var_params: + header_params['Accept-Encoding'] = local_var_params['accept_encoding'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/octet-stream', 'application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/bucket_schemas_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/bucket_schemas_service.py new file mode 100644 index 0000000..7a25338 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/bucket_schemas_service.py @@ -0,0 +1,607 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class BucketSchemasService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """BucketSchemasService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def create_measurement_schema(self, bucket_id, measurement_schema_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a measurement schema for a bucket. + + Creates an _explict_ measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) for a bucket. 
_Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. Use this endpoint to create schemas that prevent non-conforming write requests. #### Limitations - Buckets must be created with the "explicit" `schemaType` in order to use schemas. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Create a bucket with an explicit schema](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/create-bucket/#create-a-bucket-with-an-explicit-schema) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_measurement_schema(bucket_id, measurement_schema_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Adds a schema for the specified bucket. (required) + :param MeasurementSchemaCreateRequest measurement_schema_create_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.create_measurement_schema_with_http_info(bucket_id, measurement_schema_create_request, **kwargs) # noqa: E501 + else: + (data) = self.create_measurement_schema_with_http_info(bucket_id, measurement_schema_create_request, **kwargs) # noqa: E501 + return data + + def create_measurement_schema_with_http_info(self, bucket_id, measurement_schema_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a measurement schema for a bucket. + + Creates an _explicit_ measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) for a bucket. _Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. Use this endpoint to create schemas that prevent non-conforming write requests. #### Limitations - Buckets must be created with the "explicit" `schemaType` in order to use schemas. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Create a bucket with an explicit schema](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/create-bucket/#create-a-bucket-with-an-explicit-schema) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_measurement_schema_with_http_info(bucket_id, measurement_schema_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Adds a schema for the specified bucket. (required) + :param MeasurementSchemaCreateRequest measurement_schema_create_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread.
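+
+ Example — a minimal sketch of building the request body. The
+ ``MeasurementSchemaCreateRequest`` model name comes from the signature
+ above; the ``MeasurementSchemaColumn`` model and the column
+ ``type``/``data_type`` values are assumptions based on the bucket-schema
+ documentation linked above, and a ``BucketSchemasService`` instance named
+ ``bucket_schemas_service`` plus a placeholder bucket ID are assumed:
+
+ >>> from influxdb_client import MeasurementSchemaCreateRequest, MeasurementSchemaColumn
+ >>> request = MeasurementSchemaCreateRequest(
+ ...     name="cpu",
+ ...     columns=[
+ ...         MeasurementSchemaColumn(name="time", type="timestamp"),
+ ...         MeasurementSchemaColumn(name="host", type="tag"),
+ ...         MeasurementSchemaColumn(name="usage_user", type="field", data_type="float"),
+ ...     ])
+ >>> schema = bucket_schemas_service.create_measurement_schema_with_http_info(
+ ...     bucket_id="0123456789abcdef",
+ ...     measurement_schema_create_request=request)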
+ """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_measurement_schema_prepare(bucket_id, measurement_schema_create_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def create_measurement_schema_async(self, bucket_id, measurement_schema_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a measurement schema for a bucket. + + Creates an _explict_ measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) for a bucket. _Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. Use this endpoint to create schemas that prevent non-conforming write requests. #### Limitations - Buckets must be created with the "explict" `schemaType` in order to use schemas. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Create a bucket with an explicit schema](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/create-bucket/#create-a-bucket-with-an-explicit-schema) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: A bucket ID. Adds a schema for the specified bucket. (required) + :param MeasurementSchemaCreateRequest measurement_schema_create_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_measurement_schema_prepare(bucket_id, measurement_schema_create_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _create_measurement_schema_prepare(self, bucket_id, measurement_schema_create_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'measurement_schema_create_request', 'org', 'org_id'] # noqa: E501 + self._check_operation_params('create_measurement_schema', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `create_measurement_schema`") # noqa: E501 + # verify the required parameter 'measurement_schema_create_request' is set + if ('measurement_schema_create_request' not in local_var_params or + local_var_params['measurement_schema_create_request'] is None): + raise ValueError("Missing the required parameter `measurement_schema_create_request` when calling `create_measurement_schema`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + + body_params = None + if 'measurement_schema_create_request' in local_var_params: + body_params = local_var_params['measurement_schema_create_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_measurement_schema(self, bucket_id, measurement_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a measurement schema. + + Retrieves an explicit measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_measurement_schema(bucket_id, measurement_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Retrieves schemas for the specified bucket. 
(required) + :param str measurement_id: The measurement schema ID. Specifies the measurement schema to retrieve. (required) + :param str org: Organization name. Specifies the organization that owns the schema. + :param str org_id: Organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_measurement_schema_with_http_info(bucket_id, measurement_id, **kwargs) # noqa: E501 + else: + (data) = self.get_measurement_schema_with_http_info(bucket_id, measurement_id, **kwargs) # noqa: E501 + return data + + def get_measurement_schema_with_http_info(self, bucket_id, measurement_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a measurement schema. + + Retrieves an explicit measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_measurement_schema_with_http_info(bucket_id, measurement_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Retrieves schemas for the specified bucket. (required) + :param str measurement_id: The measurement schema ID. Specifies the measurement schema to retrieve. (required) + :param str org: Organization name. Specifies the organization that owns the schema. + :param str org_id: Organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_measurement_schema_prepare(bucket_id, measurement_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_measurement_schema_async(self, bucket_id, measurement_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a measurement schema. + + Retrieves an explicit measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: A bucket ID. Retrieves schemas for the specified bucket. (required) + :param str measurement_id: The measurement schema ID. Specifies the measurement schema to retrieve. (required) + :param str org: Organization name. Specifies the organization that owns the schema. + :param str org_id: Organization ID. Specifies the organization that owns the schema. 
+ :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_measurement_schema_prepare(bucket_id, measurement_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_measurement_schema_prepare(self, bucket_id, measurement_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'measurement_id', 'org', 'org_id'] # noqa: E501 + self._check_operation_params('get_measurement_schema', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_measurement_schema`") # noqa: E501 + # verify the required parameter 'measurement_id' is set + if ('measurement_id' not in local_var_params or + local_var_params['measurement_id'] is None): + raise ValueError("Missing the required parameter `measurement_id` when calling `get_measurement_schema`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + if 'measurement_id' in local_var_params: + path_params['measurementID'] = local_var_params['measurement_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_measurement_schemas(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List measurement schemas of a bucket. + + Lists _explicit_ [schemas](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) (`"schemaType": "explicit"`) for a bucket. _Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. #### Related guides - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/) + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_measurement_schemas(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Lists measurement schemas for the specified bucket. (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :param str name: A measurement name. Only returns measurement schemas with the specified name. + :return: MeasurementSchemaList + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_measurement_schemas_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.get_measurement_schemas_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def get_measurement_schemas_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List measurement schemas of a bucket. + + Lists _explicit_ [schemas](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) (`"schemaType": "explicit"`) for a bucket. _Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. #### Related guides - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_measurement_schemas_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Lists measurement schemas for the specified bucket. (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :param str name: A measurement name. Only returns measurement schemas with the specified name. + :return: MeasurementSchemaList + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_measurement_schemas_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchemaList', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_measurement_schemas_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List measurement schemas of a bucket. 
+ + Lists _explicit_ [schemas](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema) (`"schemaType": "explicit"`) for a bucket. _Explicit_ schemas are used to enforce column names, tags, fields, and data types for your data. By default, buckets have an _implicit_ schema-type (`"schemaType": "implicit"`) that conforms to your data. #### Related guides - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: A bucket ID. Lists measurement schemas for the specified bucket. (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :param str name: A measurement name. Only returns measurement schemas with the specified name. + :return: MeasurementSchemaList + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_measurement_schemas_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchemaList', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_measurement_schemas_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'org', 'org_id', 'name'] # noqa: E501 + self._check_operation_params('get_measurement_schemas', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_measurement_schemas`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def update_measurement_schema(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a measurement schema. 
+ + Updates a measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). Use this endpoint to update the fields (`name`, `type`, and `dataType`) of a measurement schema. #### Limitations - You can't update the `name` of a measurement. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.update_measurement_schema(bucket_id, measurement_id, measurement_schema_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Specifies the bucket to retrieve schemas for. (required) + :param str measurement_id: A measurement schema ID. Retrieves the specified measurement schema. (required) + :param MeasurementSchemaUpdateRequest measurement_schema_update_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.update_measurement_schema_with_http_info(bucket_id, measurement_id, measurement_schema_update_request, **kwargs) # noqa: E501 + else: + (data) = self.update_measurement_schema_with_http_info(bucket_id, measurement_id, measurement_schema_update_request, **kwargs) # noqa: E501 + return data + + def update_measurement_schema_with_http_info(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a measurement schema. + + Updates a measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). Use this endpoint to update the fields (`name`, `type`, and `dataType`) of a measurement schema. #### Limitations - You can't update the `name` of a measurement. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.update_measurement_schema_with_http_info(bucket_id, measurement_id, measurement_schema_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: A bucket ID. Specifies the bucket to retrieve schemas for. (required) + :param str measurement_id: A measurement schema ID. Retrieves the specified measurement schema. (required) + :param MeasurementSchemaUpdateRequest measurement_schema_update_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. 
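+
+ Example — a minimal sketch; the ``MeasurementSchemaUpdateRequest`` model
+ name comes from the signature above, its ``columns`` field (an updated
+ copy of the full column list) is an assumption, and the service instance
+ and IDs are placeholders:
+
+ >>> from influxdb_client import MeasurementSchemaUpdateRequest, MeasurementSchemaColumn
+ >>> update = MeasurementSchemaUpdateRequest(columns=[
+ ...     MeasurementSchemaColumn(name="time", type="timestamp"),
+ ...     MeasurementSchemaColumn(name="host", type="tag"),
+ ...     MeasurementSchemaColumn(name="usage_user", type="field", data_type="float"),
+ ...     MeasurementSchemaColumn(name="usage_system", type="field", data_type="float"),
+ ... ])
+ >>> schema = bucket_schemas_service.update_measurement_schema_with_http_info(
+ ...     bucket_id="0123456789abcdef",
+ ...     measurement_id="0123456789abcde0",
+ ...     measurement_schema_update_request=update)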
+ """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._update_measurement_schema_prepare(bucket_id, measurement_id, measurement_schema_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def update_measurement_schema_async(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a measurement schema. + + Updates a measurement [schema](https://docs.influxdata.com/influxdb/cloud/reference/glossary/#schema). Use this endpoint to update the fields (`name`, `type`, and `dataType`) of a measurement schema. #### Limitations - You can't update the `name` of a measurement. #### Related guides - [Manage bucket schemas](https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/). - [Using bucket schemas](https://www.influxdata.com/blog/new-bucket-schema-option-protect-from-unwanted-schema-changes/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: A bucket ID. Specifies the bucket to retrieve schemas for. (required) + :param str measurement_id: A measurement schema ID. Retrieves the specified measurement schema. (required) + :param MeasurementSchemaUpdateRequest measurement_schema_update_request: (required) + :param str org: An organization name. Specifies the organization that owns the schema. + :param str org_id: An organization ID. Specifies the organization that owns the schema. + :return: MeasurementSchema + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('BucketSchemasService', + 'https://docs.influxdata.com/influxdb/cloud/organizations/buckets/bucket-schema/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._update_measurement_schema_prepare(bucket_id, measurement_id, measurement_schema_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='MeasurementSchema', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _update_measurement_schema_prepare(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'measurement_id', 'measurement_schema_update_request', 'org', 'org_id'] # noqa: E501 + self._check_operation_params('update_measurement_schema', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `update_measurement_schema`") # noqa: E501 + # verify the required parameter 'measurement_id' is set + if ('measurement_id' not in local_var_params or + local_var_params['measurement_id'] is None): + raise ValueError("Missing the required parameter `measurement_id` when calling `update_measurement_schema`") # noqa: E501 + # verify the required parameter 'measurement_schema_update_request' is set + if ('measurement_schema_update_request' not in local_var_params or + local_var_params['measurement_schema_update_request'] is None): + raise ValueError("Missing the required parameter `measurement_schema_update_request` when calling `update_measurement_schema`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + if 'measurement_id' in local_var_params: + path_params['measurementID'] = local_var_params['measurement_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + + body_params = None + if 'measurement_schema_update_request' in local_var_params: + body_params = local_var_params['measurement_schema_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/buckets_service.py 
b/myenv/lib/python3.12/site-packages/influxdb_client/service/buckets_service.py new file mode 100644 index 0000000..05cf0e1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/buckets_service.py @@ -0,0 +1,1929 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class BucketsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """BucketsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_buckets_id(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Delete a bucket. + + Deletes a bucket and all associated records. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one bucket can be deleted per request. #### Related guides - [Delete a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/delete-bucket/#delete-a-bucket-in-the-influxdb-ui) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_buckets_id_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_buckets_id_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def delete_buckets_id_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Delete a bucket. + + Deletes a bucket and all associated records. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one bucket can be deleted per request. #### Related guides - [Delete a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/delete-bucket/#delete-a-bucket-in-the-influxdb-ui) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to delete.
(required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_buckets_id_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Delete a bucket. + + Deletes a bucket and all associated records. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one bucket can be deleted per request. #### Related Guides - [Delete a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/delete-bucket/#delete-a-bucket-in-the-influxdb-ui) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
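+
+ Example — a minimal sketch, run from inside a coroutine; assumes an
+ ``InfluxDBClientAsync`` named ``client`` and uses a placeholder bucket
+ ID:
+
+ >>> from influxdb_client.service.buckets_service import BucketsService
+ >>> buckets_service = BucketsService(client.api_client)
+ >>> await buckets_service.delete_buckets_id_async(bucket_id="0123456789abcdef")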
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_buckets_id_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_buckets_id', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `delete_buckets_id`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_buckets_id_labels_id(self, bucket_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a bucket. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_labels_id(bucket_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_buckets_id_labels_id_with_http_info(bucket_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_buckets_id_labels_id_with_http_info(bucket_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_buckets_id_labels_id_with_http_info(self, bucket_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a bucket. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_labels_id_with_http_info(bucket_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_labels_id_prepare(bucket_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_buckets_id_labels_id_async(self, bucket_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a bucket. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_labels_id_prepare(bucket_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_buckets_id_labels_id_prepare(self, bucket_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_buckets_id_labels_id', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `delete_buckets_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_buckets_id_labels_id`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_buckets_id_members_id(self, user_id, bucket_id, **kwargs): # 
noqa: E501,D401,D403 + """Remove a member from a bucket. + + Removes a member from a bucket. Use this endpoint to remove a user's member privileges from a bucket. This removes the user's `read` and `write` permissions for the bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_members_id(user_id, bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str bucket_id: The ID of the bucket to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_buckets_id_members_id_with_http_info(user_id, bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_buckets_id_members_id_with_http_info(user_id, bucket_id, **kwargs) # noqa: E501 + return data + + def delete_buckets_id_members_id_with_http_info(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a bucket. + + Removes a member from a bucket. Use this endpoint to remove a user's member privileges from a bucket. This removes the user's `read` and `write` permissions for the bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_members_id_with_http_info(user_id, bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str bucket_id: The ID of the bucket to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_members_id_prepare(user_id, bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_buckets_id_members_id_async(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a bucket. + + Removes a member from a bucket. Use this endpoint to remove a user's member privileges from a bucket. This removes the user's `read` and `write` permissions for the bucket. 
#### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str bucket_id: The ID of the bucket to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_members_id_prepare(user_id, bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_buckets_id_members_id_prepare(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_buckets_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_buckets_id_members_id`") # noqa: E501 + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `delete_buckets_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_buckets_id_owners_id(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a bucket. + + Removes an owner from a bucket. Use this endpoint to remove a user's `owner` role for a bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. 
#### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_owners_id(user_id, bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str bucket_id: The ID of the bucket to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_buckets_id_owners_id_with_http_info(user_id, bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_buckets_id_owners_id_with_http_info(user_id, bucket_id, **kwargs) # noqa: E501 + return data + + def delete_buckets_id_owners_id_with_http_info(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a bucket. + + Removes an owner from a bucket. Use this endpoint to remove a user's `owner` role for a bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_buckets_id_owners_id_with_http_info(user_id, bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str bucket_id: The ID of the bucket to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_owners_id_prepare(user_id, bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_buckets_id_owners_id_async(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a bucket. + + Removes an owner from a bucket. Use this endpoint to remove a user's `owner` role for a bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. 
Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str bucket_id: The ID of the bucket to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_buckets_id_owners_id_prepare(user_id, bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_buckets_id_owners_id_prepare(self, user_id, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_buckets_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_buckets_id_owners_id`") # noqa: E501 + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `delete_buckets_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_buckets(self, **kwargs): # noqa: E501,D401,D403 + """List buckets. + + Lists [buckets](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket). InfluxDB retrieves buckets owned by the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) associated with the authorization ([API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token)). 
To limit which buckets are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all buckets up to the default `limit`. #### InfluxDB OSS - If you use an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ to authenticate your request, InfluxDB retrieves resources for _all organizations_ in the instance. To retrieve resources for only a specific organization, use the `org` parameter or the `orgID` parameter to specify the organization. #### Required permissions | Action | Permission required | |:--------------------------|:--------------------| | Retrieve _user buckets_ | `read-buckets` | | Retrieve [_system buckets_](https://docs.influxdata.com/influxdb/latest/reference/internals/system-buckets/) | `read-orgs` | #### Related Guides - [Manage buckets](https://docs.influxdata.com/influxdb/latest/organizations/buckets/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str org: An organization name. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Lists buckets for the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Lists buckets for the specified organization. + :param str name: A bucket name. Only returns buckets with the specified name. + :param str id: A bucket ID. Only returns the bucket with the specified ID. + :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_buckets_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_buckets_with_http_info(**kwargs) # noqa: E501 + return data + + def get_buckets_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List buckets. + + Lists [buckets](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket). InfluxDB retrieves buckets owned by the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) associated with the authorization ([API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token)). To limit which buckets are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all buckets up to the default `limit`. 
#### InfluxDB OSS - If you use an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ to authenticate your request, InfluxDB retrieves resources for _all organizations_ in the instance. To retrieve resources for only a specific organization, use the `org` parameter or the `orgID` parameter to specify the organization. #### Required permissions | Action | Permission required | |:--------------------------|:--------------------| | Retrieve _user buckets_ | `read-buckets` | | Retrieve [_system buckets_](https://docs.influxdata.com/influxdb/latest/reference/internals/system-buckets/) | `read-orgs` | #### Related Guides - [Manage buckets](https://docs.influxdata.com/influxdb/latest/organizations/buckets/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str org: An organization name. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Lists buckets for the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Lists buckets for the specified organization. + :param str name: A bucket name. Only returns buckets with the specified name. + :param str id: A bucket ID. Only returns the bucket with the specified ID. + :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_buckets_async(self, **kwargs): # noqa: E501,D401,D403 + """List buckets. + + Lists [buckets](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket). 
InfluxDB retrieves buckets owned by the [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) associated with the authorization ([API token](https://docs.influxdata.com/influxdb/latest/reference/glossary/#token)). To limit which buckets are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all buckets up to the default `limit`. #### InfluxDB OSS - If you use an _[operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ to authenticate your request, InfluxDB retrieves resources for _all organizations_ in the instance. To retrieve resources for only a specific organization, use the `org` parameter or the `orgID` parameter to specify the organization. #### Required permissions | Action | Permission required | |:--------------------------|:--------------------| | Retrieve _user buckets_ | `read-buckets` | | Retrieve [_system buckets_](https://docs.influxdata.com/influxdb/latest/reference/internals/system-buckets/) | `read-orgs` | #### Related Guides - [Manage buckets](https://docs.influxdata.com/influxdb/latest/organizations/buckets/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str org: An organization name. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Lists buckets for the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Lists buckets for the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Lists buckets for the specified organization. + :param str name: A bucket name. Only returns buckets with the specified name. + :param str id: A bucket ID. Only returns the bucket with the specified ID. + :return: Buckets + If the method is called asynchronously, + returns the request thread. 
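+
+ Example (a minimal sketch; `api` is assumed to be a configured instance of this
+ service backed by an asynchronous api_client, the org name is a placeholder, and
+ the awaited call is assumed to resolve to the `Buckets` model named in `:return:` above):
+ >>> buckets = await api.get_buckets_async(org='my-org', limit=20)
+ >>> names = [bucket.name for bucket in buckets.buckets]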
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_buckets_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'offset', 'limit', 'after', 'org', 'org_id', 'name', 'id'] # noqa: E501 + self._check_operation_params('get_buckets', all_params, local_var_params) + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_buckets`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_buckets`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_buckets`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'after' in local_var_params: + query_params.append(('after', local_var_params['after'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'id' in local_var_params: + query_params.append(('id', local_var_params['id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_buckets_id(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a bucket. + + Retrieves a bucket. Use this endpoint to retrieve information for a specific bucket. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_buckets_id_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.get_buckets_id_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def get_buckets_id_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a bucket. + + Retrieves a bucket. Use this endpoint to retrieve information for a specific bucket. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_buckets_id_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a bucket. + + Retrieves a bucket. Use this endpoint to retrieve information for a specific bucket. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_buckets_id_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_buckets_id', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_buckets_id`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_buckets_id_labels(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a bucket. + + Lists all labels for a bucket. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. They may be used for grouping and filtering InfluxDB resources. Labels are also capable of grouping across different resources--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. - [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_labels(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_buckets_id_labels_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.get_buckets_id_labels_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def get_buckets_id_labels_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a bucket. + + Lists all labels for a bucket. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. 
They may be used for grouping and filtering InfluxDB resources. Labels are also capable of grouping across different resources--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. - [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_labels_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_labels_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_buckets_id_labels_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a bucket. + + Lists all labels for a bucket. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. They may be used for grouping and filtering InfluxDB resources. Labels are also capable of grouping across different resources--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. - [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
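+
+ Example (sketch; `api` and `bucket_id` stand for a configured service instance
+ and an existing bucket ID; the awaited call is assumed to resolve to the
+ `LabelsResponse` model named in `:return:` above):
+ >>> labels = await api.get_buckets_id_labels_async(bucket_id)
+ >>> names = [label.name for label in labels.labels]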
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_labels_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_buckets_id_labels_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_buckets_id_labels', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_buckets_id_labels`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_buckets_id_members(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a bucket. + + Lists all users for a bucket. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization with access to the specified resource. Use this endpoint to retrieve all users with access to a bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_members(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_buckets_id_members_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.get_buckets_id_members_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def get_buckets_id_members_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a bucket. + + Lists all users for a bucket. 
InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization with access to the specified resource. Use this endpoint to retrieve all users with access to a bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_members_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_members_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_buckets_id_members_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a bucket. + + Lists all users for a bucket. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization with access to the specified resource. Use this endpoint to retrieve all users with access to a bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
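+
+ Example (sketch; `api` and `bucket_id` are placeholders, and the awaited call
+ is assumed to resolve to the `ResourceMembers` model named in `:return:` above):
+ >>> members = await api.get_buckets_id_members_async(bucket_id)
+ >>> ids = [member.id for member in members.users]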
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_members_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_buckets_id_members_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_buckets_id_members', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_buckets_id_members`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_buckets_id_owners(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a bucket. + + Lists all [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) of a bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_owners(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_buckets_id_owners_with_http_info(bucket_id, **kwargs) # noqa: E501 + else: + (data) = self.get_buckets_id_owners_with_http_info(bucket_id, **kwargs) # noqa: E501 + return data + + def get_buckets_id_owners_with_http_info(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a bucket. + + Lists all [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) of a bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_buckets_id_owners_with_http_info(bucket_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_owners_prepare(bucket_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_buckets_id_owners_async(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a bucket. + + Lists all [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) of a bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes an asynchronous HTTP request. 
+ + :param async_req bool + :param str bucket_id: The ID of the bucket to retrieve owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_buckets_id_owners_prepare(bucket_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_buckets_id_owners_prepare(self, bucket_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_buckets_id_owners', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `get_buckets_id_owners`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_sources_id_buckets(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_buckets(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_sources_id_buckets_with_http_info(source_id, **kwargs) # noqa: E501 + else: + (data) = self.get_sources_id_buckets_with_http_info(source_id, **kwargs) # noqa: E501 + return data + + def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_buckets_with_http_info(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. 
+ :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_buckets_prepare(source_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_sources_id_buckets_async(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_buckets_prepare(source_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_sources_id_buckets_prepare(self, source_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'zap_trace_span', 'org'] # noqa: E501 + self._check_operation_params('get_sources_id_buckets', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `get_sources_id_buckets`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_buckets_id(self, bucket_id, patch_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Update a bucket. + + Updates a bucket. Use this endpoint to update properties (`name`, `description`, and `retentionRules`) of a bucket. #### InfluxDB Cloud - Requires the `retentionRules` property in the request body. 
If you don't provide `retentionRules`, InfluxDB responds with an HTTP `403` status code. #### InfluxDB OSS - Doesn't require `retentionRules`. #### Related Guides - [Update a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/update-bucket/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_buckets_id(bucket_id, patch_bucket_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param PatchBucketRequest patch_bucket_request: The bucket update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_buckets_id_with_http_info(bucket_id, patch_bucket_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_buckets_id_with_http_info(bucket_id, patch_bucket_request, **kwargs) # noqa: E501 + return data + + def patch_buckets_id_with_http_info(self, bucket_id, patch_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Update a bucket. + + Updates a bucket. Use this endpoint to update properties (`name`, `description`, and `retentionRules`) of a bucket. #### InfluxDB Cloud - Requires the `retentionRules` property in the request body. If you don't provide `retentionRules`, InfluxDB responds with an HTTP `403` status code. #### InfluxDB OSS - Doesn't require `retentionRules`. #### Related Guides - [Update a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/update-bucket/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_buckets_id_with_http_info(bucket_id, patch_bucket_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param PatchBucketRequest patch_bucket_request: The bucket update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_buckets_id_prepare(bucket_id, patch_bucket_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_buckets_id_async(self, bucket_id, patch_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Update a bucket. + + Updates a bucket. Use this endpoint to update properties (`name`, `description`, and `retentionRules`) of a bucket. #### InfluxDB Cloud - Requires the `retentionRules` property in the request body. If you don't provide `retentionRules`, InfluxDB responds with an HTTP `403` status code. #### InfluxDB OSS - Doesn't require `retentionRules`. 
#### Related Guides - [Update a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/update-bucket/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param PatchBucketRequest patch_bucket_request: The bucket update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_buckets_id_prepare(bucket_id, patch_bucket_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_buckets_id_prepare(self, bucket_id, patch_bucket_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'patch_bucket_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_buckets_id', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `patch_buckets_id`") # noqa: E501 + # verify the required parameter 'patch_bucket_request' is set + if ('patch_bucket_request' not in local_var_params or + local_var_params['patch_bucket_request'] is None): + raise ValueError("Missing the required parameter `patch_bucket_request` when calling `patch_buckets_id`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'patch_bucket_request' in local_var_params: + body_params = local_var_params['patch_bucket_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_buckets(self, post_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Create a bucket. + + Creates a [bucket](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket) and returns the bucket resource. The default data [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period) is 30 days. #### InfluxDB OSS - A single InfluxDB OSS instance supports active writes or queries for approximately 20 buckets across all organizations at a given time. Reading or writing to more than 20 buckets at a time can adversely affect performance. 
#### Limitations - InfluxDB Cloud Free Plan allows users to create up to two buckets. Exceeding the bucket quota will result in an HTTP `403` status code. For additional information regarding InfluxDB Cloud offerings, see [InfluxDB Cloud Pricing](https://www.influxdata.com/influxdb-cloud-pricing/). #### Related Guides - [Create a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/create-bucket/) - [Create bucket CLI reference](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/bucket/create) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets(post_bucket_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostBucketRequest post_bucket_request: The bucket to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_buckets_with_http_info(post_bucket_request, **kwargs) # noqa: E501 + else: + (data) = self.post_buckets_with_http_info(post_bucket_request, **kwargs) # noqa: E501 + return data + + def post_buckets_with_http_info(self, post_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Create a bucket. + + Creates a [bucket](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket) and returns the bucket resource. The default data [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period) is 30 days. #### InfluxDB OSS - A single InfluxDB OSS instance supports active writes or queries for approximately 20 buckets across all organizations at a given time. Reading or writing to more than 20 buckets at a time can adversely affect performance. #### Limitations - InfluxDB Cloud Free Plan allows users to create up to two buckets. Exceeding the bucket quota will result in an HTTP `403` status code. For additional information regarding InfluxDB Cloud offerings, see [InfluxDB Cloud Pricing](https://www.influxdata.com/influxdb-cloud-pricing/). #### Related Guides - [Create a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/create-bucket/) - [Create bucket CLI reference](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/bucket/create) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_with_http_info(post_bucket_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostBucketRequest post_bucket_request: The bucket to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. 
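+
+ An illustrative sketch, not generated documentation: it assumes `api` is a BucketsService bound to a configured ApiClient, that `PostBucketRequest` is re-exported at the package top level (as this client's domain models are), and that the org ID placeholder is replaced with a real value. Without `_return_http_data_only`, the response data is returned together with the HTTP status and headers.
+
+ >>> from influxdb_client import PostBucketRequest
+ >>> req = PostBucketRequest(org_id='<org-id>', name='example-bucket')
+ >>> bucket, status, headers = api.post_buckets_with_http_info(req)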
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_prepare(post_bucket_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_buckets_async(self, post_bucket_request, **kwargs): # noqa: E501,D401,D403 + """Create a bucket. + + Creates a [bucket](https://docs.influxdata.com/influxdb/latest/reference/glossary/#bucket) and returns the bucket resource. The default data [retention period](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-period) is 30 days. #### InfluxDB OSS - A single InfluxDB OSS instance supports active writes or queries for approximately 20 buckets across all organizations at a given time. Reading or writing to more than 20 buckets at a time can adversely affect performance. #### Limitations - InfluxDB Cloud Free Plan allows users to create up to two buckets. Exceeding the bucket quota will result in an HTTP `403` status code. For additional information regarding InfluxDB Cloud offerings, see [InfluxDB Cloud Pricing](https://www.influxdata.com/influxdb-cloud-pricing/). #### Related Guides - [Create a bucket](https://docs.influxdata.com/influxdb/latest/organizations/buckets/create-bucket/) - [Create bucket CLI reference](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/bucket/create) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostBucketRequest post_bucket_request: The bucket to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Bucket + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_prepare(post_bucket_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Bucket', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_buckets_prepare(self, post_bucket_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_bucket_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_buckets', all_params, local_var_params) + # verify the required parameter 'post_bucket_request' is set + if ('post_bucket_request' not in local_var_params or + local_var_params['post_bucket_request'] is None): + raise ValueError("Missing the required parameter `post_bucket_request` when calling `post_buckets`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'post_bucket_request' in local_var_params: + body_params = local_var_params['post_bucket_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_buckets_id_labels(self, bucket_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a bucket. + + Adds a label to a bucket and returns the new label information. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. They may be used for grouping and filtering across one or more kinds of **resources**--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Limitations - Before adding a label to a bucket, you must create the label if you haven't already. To create a label with the InfluxDB API, send a `POST` request to the [`/api/v2/labels` endpoint](#operation/PostLabels)). #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. - [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_labels(bucket_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_buckets_id_labels_with_http_info(bucket_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_buckets_id_labels_with_http_info(bucket_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_buckets_id_labels_with_http_info(self, bucket_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a bucket. + + Adds a label to a bucket and returns the new label information. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. They may be used for grouping and filtering across one or more kinds of **resources**--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Limitations - Before adding a label to a bucket, you must create the label if you haven't already. To create a label with the InfluxDB API, send a `POST` request to the [`/api/v2/labels` endpoint](#operation/PostLabels)). #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. - [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_labels_with_http_info(bucket_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_labels_prepare(bucket_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_buckets_id_labels_async(self, bucket_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a bucket. + + Adds a label to a bucket and returns the new label information. Labels are objects that contain `labelID`, `name`, `description`, and `color` key-value pairs. They may be used for grouping and filtering across one or more kinds of **resources**--for example, you can apply a label named `air_sensor` to a bucket and a task to quickly organize resources. #### Limitations - Before adding a label to a bucket, you must create the label if you haven't already. To create a label with the InfluxDB API, send a `POST` request to the [`/api/v2/labels` endpoint](#operation/PostLabels)). #### Related guides - Use the [`/api/v2/labels` InfluxDB API endpoint](#tag/Labels) to retrieve and manage labels. 
- [Manage labels in the InfluxDB UI](https://docs.influxdata.com/influxdb/latest/visualize-data/labels/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: Bucket ID. The ID of the bucket to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_labels_prepare(bucket_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_buckets_id_labels_prepare(self, bucket_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_buckets_id_labels', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `post_buckets_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_buckets_id_labels`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_buckets_id_members(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a bucket. + + Add a user to a bucket and return the new user information. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization. Use this endpoint to give a user member privileges to a bucket. 
#### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_members(bucket_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to add a member to. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_buckets_id_members_with_http_info(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_buckets_id_members_with_http_info(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_buckets_id_members_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a bucket. + + Add a user to a bucket and return the new user information. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization. Use this endpoint to give a user member privileges to a bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_members_with_http_info(bucket_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to add a member to. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_members_prepare(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_buckets_id_members_async(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a bucket. + + Add a user to a bucket and return the new user information.
InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users in an organization. Use this endpoint to give a user member privileges to a bucket. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The ID of the bucket to add a member to. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member to the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_members_prepare(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_buckets_id_members_prepare(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_buckets_id_members', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `post_buckets_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_buckets_id_members`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_buckets_id_owners(self, bucket_id, add_resource_member_request_body,
**kwargs): # noqa: E501,D401,D403 + """Add an owner to a bucket. + + Adds an owner to a bucket and returns the [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) with role and user detail. Use this endpoint to create a _resource owner_ for the bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_owners(bucket_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner for the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_buckets_id_owners_with_http_info(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_buckets_id_owners_with_http_info(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_buckets_id_owners_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a bucket. + + Adds an owner to a bucket and returns the [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) with role and user detail. Use this endpoint to create a _resource owner_ for the bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_buckets_id_owners_with_http_info(bucket_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The ID of the bucket to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner for the bucket. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_owners_prepare(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_buckets_id_owners_async(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a bucket. + + Adds an owner to a bucket and returns the [owners](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) with role and user detail. Use this endpoint to create a _resource owner_ for the bucket. Bucket owners have permission to delete buckets and remove user and member permissions from the bucket. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The ID of the bucket to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner for the bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
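+
+ An illustrative sketch, not generated documentation: it assumes an async-configured ApiClient, runs inside a coroutine (shown with a bare `await`), and uses placeholder IDs.
+
+ >>> from influxdb_client import AddResourceMemberRequestBody
+ >>> body = AddResourceMemberRequestBody(id='<user-id>')
+ >>> owner = await api.post_buckets_id_owners_async('<bucket-id>', body, _return_http_data_only=True)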
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_buckets_id_owners_prepare(bucket_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/buckets/{bucketID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_buckets_id_owners_prepare(self, bucket_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_buckets_id_owners', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `post_buckets_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_buckets_id_owners`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/cells_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/cells_service.py new file mode 100644 index 0000000..13c811b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/cells_service.py @@ -0,0 +1,820 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class CellsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """CellsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_cells_id(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_cells_id_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_cells_id_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_cells_id_prepare(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id_cells_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `delete_dashboards_id_cells_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = 
local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. + + Updates the non-positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id(dashboard_id, cell_id, cell_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. + + Updates the non-positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread.
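+
+ An illustrative sketch, not generated documentation: it assumes `api` is a CellsService, that `CellUpdate` accepts a `name` keyword (as this package's model does), and that the IDs are placeholders. Without `_return_http_data_only`, the data is returned together with the HTTP status and headers.
+
+ >>> from influxdb_client import CellUpdate
+ >>> cell, status, headers = api.patch_dashboards_id_cells_id_with_http_info(
+ ...     '<dashboard-id>', '<cell-id>', CellUpdate(name='renamed cell'))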
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_prepare(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. + + Updates the non positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_prepare(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_cells_id_prepare(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'cell_update', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_dashboards_id_cells_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_update' is set + if ('cell_update' not in local_var_params or + local_var_params['cell_update'] is None): + raise ValueError("Missing the required parameter `cell_update` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = 
local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'cell_update' in local_var_params: + body_params = local_var_params['cell_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. 
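+
+ An illustrative sketch, not generated documentation: a common pattern is to fetch the current view, modify it, and send it back; `api` is assumed to be a CellsService and the IDs are placeholders.
+
+ >>> view = api.get_dashboards_id_cells_id_view('<dashboard-id>', '<cell-id>')
+ >>> view.name = 'renamed view'
+ >>> updated, status, headers = api.patch_dashboards_id_cells_id_view_with_http_info(
+ ...     '<dashboard-id>', '<cell-id>', view)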
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'view', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'view' is set + if ('view' not in local_var_params or + local_var_params['view'] is None): + raise ValueError("Missing the required parameter `view` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' 
in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'view' in local_var_params: + body_params = local_var_params['view'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_cells(dashboard_id, create_cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, **kwargs) # noqa: E501 + return data + + def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_cells_prepare(dashboard_id, create_cell, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. 
(required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_cells_prepare(dashboard_id, create_cell, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_id_cells_prepare(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'create_cell', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards_id_cells', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `post_dashboards_id_cells`") # noqa: E501 + # verify the required parameter 'create_cell' is set + if ('create_cell' not in local_var_params or + local_var_params['create_cell'] is None): + raise ValueError("Missing the required parameter `create_cell` when calling `post_dashboards_id_cells`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'create_cell' in local_var_params: + body_params = local_var_params['create_cell'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_dashboards_id_cells(dashboard_id, cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
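+
+        A minimal positional-update sketch (illustrative only: assumes `api` is a
+        DashboardsService bound to a configured client, and `cells` is the
+        dashboard's current list of Cell models with adjusted x/y/w/h values):
+        >>> dashboard = api.put_dashboards_id_cells(dashboard_id, cells)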
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_dashboards_id_cells_with_http_info(dashboard_id, cell, **kwargs) # noqa: E501 + else: + (data) = self.put_dashboards_id_cells_with_http_info(dashboard_id, cell, **kwargs) # noqa: E501 + return data + + def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_dashboards_id_cells_with_http_info(dashboard_id, cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_dashboards_id_cells_prepare(dashboard_id, cell, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_dashboards_id_cells_prepare(dashboard_id, cell, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_dashboards_id_cells_prepare(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_dashboards_id_cells', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `put_dashboards_id_cells`") # noqa: E501 + # verify the required parameter 'cell' is set + if ('cell' not in local_var_params or + local_var_params['cell'] is None): + raise ValueError("Missing the required parameter `cell` when calling `put_dashboards_id_cells`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'cell' in local_var_params: + body_params = local_var_params['cell'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/checks_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/checks_service.py new file mode 100644 index 0000000..2c780f3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/checks_service.py @@ -0,0 +1,1253 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ChecksService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ChecksService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def create_check(self, post_check, **kwargs): # noqa: E501,D401,D403 + """Add new check. 
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_check(post_check, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostCheck post_check: Check to create (required) + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.create_check_with_http_info(post_check, **kwargs) # noqa: E501 + else: + (data) = self.create_check_with_http_info(post_check, **kwargs) # noqa: E501 + return data + + def create_check_with_http_info(self, post_check, **kwargs): # noqa: E501,D401,D403 + """Add new check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_check_with_http_info(post_check, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostCheck post_check: Check to create (required) + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_check_prepare(post_check, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def create_check_async(self, post_check, **kwargs): # noqa: E501,D401,D403 + """Add new check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostCheck post_check: Check to create (required) + :return: Check + If the method is called asynchronously, + returns the request thread. 
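+
+        Illustrative sketch (assumes an asyncio context and that `post_check`
+        is a fully populated PostCheck body):
+        >>> check = await api.create_check_async(post_check)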
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_check_prepare(post_check, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _create_check_prepare(self, post_check, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_check'] # noqa: E501 + self._check_operation_params('create_check', all_params, local_var_params) + # verify the required parameter 'post_check' is set + if ('post_check' not in local_var_params or + local_var_params['post_check'] is None): + raise ValueError("Missing the required parameter `post_check` when calling `create_check`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'post_check' in local_var_params: + body_params = local_var_params['post_check'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_checks_id(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Delete a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_checks_id(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_checks_id_with_http_info(check_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_checks_id_with_http_info(check_id, **kwargs) # noqa: E501 + return data + + def delete_checks_id_with_http_info(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Delete a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_checks_id_with_http_info(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_checks_id_prepare(check_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_checks_id_async(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Delete a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_checks_id_prepare(check_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_checks_id_prepare(self, check_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_checks_id', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `delete_checks_id`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_checks_id_labels_id(self, check_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_checks_id_labels_id(check_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_checks_id_labels_id_with_http_info(check_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_checks_id_labels_id_with_http_info(check_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_checks_id_labels_id_with_http_info(self, check_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_checks_id_labels_id_with_http_info(check_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_checks_id_labels_id_prepare(check_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_checks_id_labels_id_async(self, check_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_checks_id_labels_id_prepare(check_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_checks_id_labels_id_prepare(self, check_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_checks_id_labels_id', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `delete_checks_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_checks_id_labels_id`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_checks(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all checks. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show checks that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: Checks + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_checks_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_checks_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_checks_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all checks. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show checks that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: Checks + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Checks', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_checks_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all checks. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: Only show checks that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: Checks + If the method is called asynchronously, + returns the request thread. 
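+
+        Paging sketch (illustrative; `limit` accepts 1-100 and `offset` must be
+        non-negative, per the parameter validation in this service):
+        >>> checks = await api.get_checks_async(org_id, limit=100, offset=0)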
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Checks', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_checks_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span', 'offset', 'limit'] # noqa: E501 + self._check_operation_params('get_checks', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_checks`") # noqa: E501 + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_checks`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_checks`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_checks`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_checks_id(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_checks_id_with_http_info(check_id, **kwargs) # noqa: E501 + else: + (data) = self.get_checks_id_with_http_info(check_id, **kwargs) # noqa: E501 + return data + + def get_checks_id_with_http_info(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check. 
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id_with_http_info(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_prepare(check_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_checks_id_async(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_prepare(check_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_checks_id_prepare(self, check_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_checks_id', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `get_checks_id`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_checks_id_labels(self, check_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a check. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id_labels(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_checks_id_labels_with_http_info(check_id, **kwargs) # noqa: E501 + else: + (data) = self.get_checks_id_labels_with_http_info(check_id, **kwargs) # noqa: E501 + return data + + def get_checks_id_labels_with_http_info(self, check_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id_labels_with_http_info(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_labels_prepare(check_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_checks_id_labels_async(self, check_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
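+
+        Illustrative sketch (assumes the LabelsResponse exposes its labels as a
+        `labels` list, as in the generated models):
+        >>> response = await api.get_checks_id_labels_async(check_id)
+        >>> names = [label.name for label in response.labels]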
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_labels_prepare(check_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_checks_id_labels_prepare(self, check_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_checks_id_labels', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `get_checks_id_labels`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_checks_id_query(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check query. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id_query(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_checks_id_query_with_http_info(check_id, **kwargs) # noqa: E501 + else: + (data) = self.get_checks_id_query_with_http_info(check_id, **kwargs) # noqa: E501 + return data + + def get_checks_id_query_with_http_info(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check query. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_checks_id_query_with_http_info(check_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_query_prepare(check_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}/query', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_checks_id_query_async(self, check_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a check query. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_checks_id_query_prepare(check_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}/query', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_checks_id_query_prepare(self, check_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_checks_id_query', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `get_checks_id_query`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_checks_id(self, check_id, check_patch, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_checks_id(check_id, check_patch, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param CheckPatch check_patch: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_checks_id_with_http_info(check_id, check_patch, **kwargs) # noqa: E501 + else: + (data) = self.patch_checks_id_with_http_info(check_id, check_patch, **kwargs) # noqa: E501 + return data + + def patch_checks_id_with_http_info(self, check_id, check_patch, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_checks_id_with_http_info(check_id, check_patch, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param CheckPatch check_patch: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_checks_id_prepare(check_id, check_patch, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_checks_id_async(self, check_id, check_patch, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param CheckPatch check_patch: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_checks_id_prepare(check_id, check_patch, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_checks_id_prepare(self, check_id, check_patch, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'check_patch', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_checks_id', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `patch_checks_id`") # noqa: E501 + # verify the required parameter 'check_patch' is set + if ('check_patch' not in local_var_params or + local_var_params['check_patch'] is None): + raise ValueError("Missing the required parameter `check_patch` when calling `patch_checks_id`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'check_patch' in local_var_params: + body_params = local_var_params['check_patch'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_checks_id_labels(self, check_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_checks_id_labels(check_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_checks_id_labels_with_http_info(check_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_checks_id_labels_with_http_info(check_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_checks_id_labels_with_http_info(self, check_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a check. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_checks_id_labels_with_http_info(check_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_checks_id_labels_prepare(check_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_checks_id_labels_async(self, check_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_checks_id_labels_prepare(check_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_checks_id_labels_prepare(self, check_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_checks_id_labels', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `post_checks_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_checks_id_labels`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = 
local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_checks_id(self, check_id, check, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_checks_id(check_id, check, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param Check check: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_checks_id_with_http_info(check_id, check, **kwargs) # noqa: E501 + else: + (data) = self.put_checks_id_with_http_info(check_id, check, **kwargs) # noqa: E501 + return data + + def put_checks_id_with_http_info(self, check_id, check, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_checks_id_with_http_info(check_id, check, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str check_id: The check ID. (required) + :param Check check: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_checks_id_prepare(check_id, check, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/checks/{checkID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_checks_id_async(self, check_id, check, **kwargs): # noqa: E501,D401,D403 + """Update a check. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str check_id: The check ID. (required) + :param Check check: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Check + If the method is called asynchronously, + returns the request thread. 
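+
+        Illustrative sketch (PUT replaces the stored definition, so `check`
+        should be the complete, modified Check, typically fetched first):
+        >>> check = await api.get_checks_id_async(check_id)
+        >>> check.name = 'renamed check'
+        >>> check = await api.put_checks_id_async(check_id, check)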
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_checks_id_prepare(check_id, check, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/checks/{checkID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Check', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_checks_id_prepare(self, check_id, check, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['check_id', 'check', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_checks_id', all_params, local_var_params) + # verify the required parameter 'check_id' is set + if ('check_id' not in local_var_params or + local_var_params['check_id'] is None): + raise ValueError("Missing the required parameter `check_id` when calling `put_checks_id`") # noqa: E501 + # verify the required parameter 'check' is set + if ('check' not in local_var_params or + local_var_params['check'] is None): + raise ValueError("Missing the required parameter `check` when calling `put_checks_id`") # noqa: E501 + + path_params = {} + if 'check_id' in local_var_params: + path_params['checkID'] = local_var_params['check_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'check' in local_var_params: + body_params = local_var_params['check'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/config_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/config_service.py new file mode 100644 index 0000000..2dd85d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/config_service.py @@ -0,0 +1,250 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ConfigService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ConfigService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_config(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve runtime configuration. + + Returns the active runtime configuration of the InfluxDB instance. 
In InfluxDB v2.2+, use this endpoint to view your active runtime configuration, including flags and environment variables. #### Related guides - [View your runtime server configuration](https://docs.influxdata.com/influxdb/latest/reference/config-options/#view-your-runtime-server-configuration) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_config(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Config + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_config_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_config_with_http_info(**kwargs) # noqa: E501 + return data + + def get_config_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve runtime configuration. + + Returns the active runtime configuration of the InfluxDB instance. In InfluxDB v2.2+, use this endpoint to view your active runtime configuration, including flags and environment variables. #### Related guides - [View your runtime server configuration](https://docs.influxdata.com/influxdb/latest/reference/config-options/#view-your-runtime-server-configuration) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_config_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Config + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_config_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/config', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Config', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_config_async(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve runtime configuration. + + Returns the active runtime configuration of the InfluxDB instance. In InfluxDB v2.2+, use this endpoint to view your active runtime configuration, including flags and environment variables. #### Related guides - [View your runtime server configuration](https://docs.influxdata.com/influxdb/latest/reference/config-options/#view-your-runtime-server-configuration) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Config + If the method is called asynchronously, + returns the request thread. 
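+
+ A hypothetical awaited-usage sketch: assumes `api` is a ConfigService
+ created from an async-capable ApiClient; the exact shape of the awaited
+ value depends on the `_return_http_data_only` option.
+
+ >>> config = await api.get_config_async()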
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_config_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/config', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Config', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_config_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_config', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_flags(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve feature flags. + + Retrieves the feature flag key-value pairs configured for the InfluxDB instance. _Feature flags_ are configuration options used to develop and test experimental InfluxDB features and are intended for internal use only. This endpoint represents the first step in the following three-step process to configure feature flags: 1. Use [token authentication](#section/Authentication/TokenAuthentication) or a [user session](#tag/Signin) with this endpoint to retrieve feature flags and their values. 2. Follow the instructions to [enable, disable, or override values for feature flags](https://docs.influxdata.com/influxdb/latest/reference/config-options/#feature-flags). 3. **Optional**: To confirm that your change is applied, do one of the following: - Send a request to this endpoint to retrieve the current feature flag values. - Send a request to the [`GET /api/v2/config` endpoint](#operation/GetConfig) to retrieve the current runtime server configuration. #### Related guides - [InfluxDB configuration options](https://docs.influxdata.com/influxdb/latest/reference/config-options/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_flags(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: dict(str, object) + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_flags_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_flags_with_http_info(**kwargs) # noqa: E501 + return data + + def get_flags_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve feature flags. + + Retrieves the feature flag key-value pairs configured for the InfluxDB instance. _Feature flags_ are configuration options used to develop and test experimental InfluxDB features and are intended for internal use only. 
This endpoint represents the first step in the following three-step process to configure feature flags: 1. Use [token authentication](#section/Authentication/TokenAuthentication) or a [user session](#tag/Signin) with this endpoint to retrieve feature flags and their values. 2. Follow the instructions to [enable, disable, or override values for feature flags](https://docs.influxdata.com/influxdb/latest/reference/config-options/#feature-flags). 3. **Optional**: To confirm that your change is applied, do one of the following: - Send a request to this endpoint to retrieve the current feature flag values. - Send a request to the [`GET /api/v2/config` endpoint](#operation/GetConfig) to retrieve the current runtime server configuration. #### Related guides - [InfluxDB configuration options](https://docs.influxdata.com/influxdb/latest/reference/config-options/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_flags_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: dict(str, object) + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_flags_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/flags', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='dict(str, object)', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_flags_async(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve feature flags. + + Retrieves the feature flag key-value pairs configured for the InfluxDB instance. _Feature flags_ are configuration options used to develop and test experimental InfluxDB features and are intended for internal use only. This endpoint represents the first step in the following three-step process to configure feature flags: 1. Use [token authentication](#section/Authentication/TokenAuthentication) or a [user session](#tag/Signin) with this endpoint to retrieve feature flags and their values. 2. Follow the instructions to [enable, disable, or override values for feature flags](https://docs.influxdata.com/influxdb/latest/reference/config-options/#feature-flags). 3. **Optional**: To confirm that your change is applied, do one of the following: - Send a request to this endpoint to retrieve the current feature flag values. - Send a request to the [`GET /api/v2/config` endpoint](#operation/GetConfig) to retrieve the current runtime server configuration. #### Related guides - [InfluxDB configuration options](https://docs.influxdata.com/influxdb/latest/reference/config-options/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: dict(str, object) + If the method is called asynchronously, + returns the request thread. 
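+
+ A hypothetical awaited-usage sketch: assumes `api` is a ConfigService on an
+ async-capable ApiClient and that the awaited value is the deserialized
+ dict(str, object) of flag names to values.
+
+ >>> flags = await api.get_flags_async()
+ >>> for name, value in flags.items():
+ ...     print(name, value)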
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_flags_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/flags', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='dict(str, object)', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_flags_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_flags', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/dashboards_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/dashboards_service.py new file mode 100644 index 0000000..801c6c7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/dashboards_service.py @@ -0,0 +1,2568 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class DashboardsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """DashboardsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_dashboards_id(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell.
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_cells_id(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete the cell from. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete the cell from. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_cells_id_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Delete a dashboard cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to delete the cell from. (required) + :param str cell_id: The ID of the cell to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread.
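+
+ A hypothetical awaited-usage sketch: assumes `api` is a DashboardsService
+ on an async-capable ApiClient. A successful delete returns no body.
+
+ >>> await api.delete_dashboards_id_cells_id_async(dashboard_id, cell_id)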
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_cells_id_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_cells_id_prepare(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id_cells_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `delete_dashboards_id_cells_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_dashboards_id_labels_id(self, dashboard_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_labels_id(dashboard_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_labels_id_with_http_info(dashboard_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_labels_id_with_http_info(dashboard_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_labels_id_with_http_info(self, dashboard_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a dashboard. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_labels_id_with_http_info(dashboard_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_labels_id_prepare(dashboard_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_labels_id_async(self, dashboard_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_labels_id_prepare(dashboard_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_labels_id_prepare(self, dashboard_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id_labels_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_dashboards_id_labels_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'label_id' in 
local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_dashboards_id_members_id(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_members_id(user_id, dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_members_id_with_http_info(user_id, dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_members_id_with_http_info(user_id, dashboard_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_members_id_with_http_info(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_members_id_with_http_info(user_id, dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_members_id_prepare(user_id, dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_members_id_async(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
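+
+ A hypothetical awaited-usage sketch: note that the member's user ID is
+ passed before the dashboard ID, matching the signature above.
+
+ >>> await api.delete_dashboards_id_members_id_async(user_id, dashboard_id)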
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_members_id_prepare(user_id, dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_members_id_prepare(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_dashboards_id_members_id`") # noqa: E501 + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_dashboards_id_owners_id(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_owners_id(user_id, dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dashboards_id_owners_id_with_http_info(user_id, dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dashboards_id_owners_id_with_http_info(user_id, dashboard_id, **kwargs) # noqa: E501 + return data + + def delete_dashboards_id_owners_id_with_http_info(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a dashboard. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dashboards_id_owners_id_with_http_info(user_id, dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_owners_id_prepare(user_id, dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dashboards_id_owners_id_async(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dashboards_id_owners_id_prepare(user_id, dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dashboards_id_owners_id_prepare(self, user_id, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_dashboards_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_dashboards_id_owners_id`") # noqa: E501 + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `delete_dashboards_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'dashboard_id' in local_var_params: + 
path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards(self, **kwargs): # noqa: E501,D401,D403 + """List dashboards. + + Lists [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). #### Related guides - [Manage dashboards](https://docs.influxdata.com/influxdb/latest/visualize-data/dashboards/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str owner: A user ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) where the specified user has the `owner` role. + :param str sort_by: The column to sort by. + :param list[str] id: A list of dashboard IDs. Returns only the specified [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). If you specify `id` and `owner`, only `id` is used. + :param str org_id: An organization ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :return: Dashboards + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_with_http_info(**kwargs) # noqa: E501 + return data + + def get_dashboards_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List dashboards. + + Lists [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). #### Related guides - [Manage dashboards](https://docs.influxdata.com/influxdb/latest/visualize-data/dashboards/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). 
+ :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str owner: A user ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) where the specified user has the `owner` role. + :param str sort_by: The column to sort by. + :param list[str] id: A list of dashboard IDs. Returns only the specified [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). If you specify `id` and `owner`, only `id` is used. + :param str org_id: An organization ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :return: Dashboards + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboards', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_async(self, **kwargs): # noqa: E501,D401,D403 + """List dashboards. + + Lists [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). #### Related guides - [Manage dashboards](https://docs.influxdata.com/influxdb/latest/visualize-data/dashboards/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str owner: A user ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) where the specified user has the `owner` role. + :param str sort_by: The column to sort by. + :param list[str] id: A list of dashboard IDs. Returns only the specified [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard). If you specify `id` and `owner`, only `id` is used. + :param str org_id: An organization ID. Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :param str org: An organization name. 
Only returns [dashboards](https://docs.influxdata.com/influxdb/latest/reference/glossary/#dashboard) that belong to the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). + :return: Dashboards + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboards', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'offset', 'limit', 'descending', 'owner', 'sort_by', 'id', 'org_id', 'org'] # noqa: E501 + self._check_operation_params('get_dashboards', all_params, local_var_params) + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_dashboards`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_dashboards`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_dashboards`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'descending' in local_var_params: + query_params.append(('descending', local_var_params['descending'])) # noqa: E501 + if 'owner' in local_var_params: + query_params.append(('owner', local_var_params['owner'])) # noqa: E501 + if 'sort_by' in local_var_params: + query_params.append(('sortBy', local_var_params['sort_by'])) # noqa: E501 + if 'id' in local_var_params: + query_params.append(('id', local_var_params['id'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a dashboard. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :param str include: If `properties`, includes the cell view properties in the response. + :return: DashboardWithViewProperties + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :param str include: If `properties`, includes the cell view properties in the response. + :return: DashboardWithViewProperties + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DashboardWithViewProperties', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :param str include: If `properties`, includes the cell view properties in the response. + :return: DashboardWithViewProperties + If the method is called asynchronously, + returns the request thread.
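+
+ A hypothetical awaited-usage sketch: passing `include='properties'`
+ requests the cell view properties described above.
+
+ >>> dashboard = await api.get_dashboards_id_async(dashboard_id, include='properties')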
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DashboardWithViewProperties', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span', 'include'] # noqa: E501 + self._check_operation_params('get_dashboards_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + if 'include' in local_var_params: + query_params.append(('include', local_var_params['include'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = 
self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id_labels(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_labels(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_labels_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_labels_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_labels_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_labels_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_labels_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_labels_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
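+
+ Usage sketch (illustrative, not part of the generated template; assumes
+ ``api`` is an instance of this service and that LabelsResponse exposes a
+ ``labels`` list). ``_return_http_data_only=True`` mirrors what the
+ synchronous wrapper sets before delegating:
+ >>> labels_response = await api.get_dashboards_id_labels_async(dashboard_id, _return_http_data_only=True)
+ >>> print([label.name for label in labels_response.labels])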
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_labels_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_labels_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_labels', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_labels`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id_members(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard members. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_members(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_members_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_members_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_members_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard members. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_members_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_members_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_members_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard members. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_members_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_members_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_members', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_members`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dashboards_id_owners(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard owners. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_owners(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_owners_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_owners_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_owners_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard owners. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_owners_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_owners_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_owners_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """List all dashboard owners. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
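+
+ Illustrative sketch (the instance name ``api`` is an assumption;
+ ``_return_http_data_only=True`` mirrors the synchronous wrapper):
+ >>> owners = await api.get_dashboards_id_owners_async(dashboard_id, _return_http_data_only=True)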
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_owners_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_owners_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_owners', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_owners`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Update a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str zap_trace_span: OpenTracing span context + :param PatchDashboardRequest patch_dashboard_request: + :return: Dashboard + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_with_http_info(dashboard_id, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_with_http_info(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Update a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_with_http_info(dashboard_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str zap_trace_span: OpenTracing span context + :param PatchDashboardRequest patch_dashboard_request: + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_async(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + """Update a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str zap_trace_span: OpenTracing span context + :param PatchDashboardRequest patch_dashboard_request: + :return: Dashboard + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_prepare(dashboard_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_prepare(self, dashboard_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'zap_trace_span', 'patch_dashboard_request'] # noqa: E501 + self._check_operation_params('patch_dashboards_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'patch_dashboard_request' in local_var_params: + body_params = local_var_params['patch_dashboard_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. 
+ + Updates the non-positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id(dashboard_id, cell_id, cell_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. + + Updates the non-positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_with_http_info(dashboard_id, cell_id, cell_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_prepare(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + """Update the non-positional information related to a cell. + + Updates the non-positional information related to a cell. Updates to a single cell's positional data could cause grid conflicts. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param CellUpdate cell_update: (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread.
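+
+ Illustrative sketch (``cell_update`` stands for a populated CellUpdate
+ model; the instance name ``api`` is an assumption):
+ >>> cell = await api.patch_dashboards_id_cells_id_async(dashboard_id, cell_id, cell_update, _return_http_data_only=True)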
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_prepare(dashboard_id, cell_id, cell_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_cells_id_prepare(self, dashboard_id, cell_id, cell_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'cell_update', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_dashboards_id_cells_id', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + # verify the required parameter 'cell_update' is set + if ('cell_update' not in local_var_params or + local_var_params['cell_update'] is None): + raise ValueError("Missing the required parameter `cell_update` when calling `patch_dashboards_id_cells_id`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'cell_update' in local_var_params: + body_params = local_var_params['cell_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. 
(required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. 
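+
+ Sketch (illustrative): fetch the current view, adjust it, then write it
+ back. Assumes ``api`` is an instance of this service and that the View
+ model exposes a mutable ``name`` attribute:
+ >>> view = await api.get_dashboards_id_cells_id_view_async(dashboard_id, cell_id, _return_http_data_only=True)
+ >>> view.name = 'Renamed view'
+ >>> view = await api.patch_dashboards_id_cells_id_view_async(dashboard_id, cell_id, view, _return_http_data_only=True)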
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'view', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'view' is set + if ('view' not in local_var_params or + local_var_params['view'] is None): + raise ValueError("Missing the required parameter `view` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'view' in local_var_params: + body_params = local_var_params['view'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards(self, create_dashboard_request, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards(create_dashboard_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param CreateDashboardRequest create_dashboard_request: Dashboard to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_with_http_info(create_dashboard_request, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_with_http_info(create_dashboard_request, **kwargs) # noqa: E501 + return data + + def post_dashboards_with_http_info(self, create_dashboard_request, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_with_http_info(create_dashboard_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param CreateDashboardRequest create_dashboard_request: Dashboard to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_prepare(create_dashboard_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_async(self, create_dashboard_request, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param CreateDashboardRequest create_dashboard_request: Dashboard to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_prepare(create_dashboard_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_prepare(self, create_dashboard_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['create_dashboard_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards', all_params, local_var_params) + # verify the required parameter 'create_dashboard_request' is set + if ('create_dashboard_request' not in local_var_params or + local_var_params['create_dashboard_request'] is None): + raise ValueError("Missing the required parameter `create_dashboard_request` when calling `post_dashboards`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'create_dashboard_request' in local_var_params: + body_params = local_var_params['create_dashboard_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_cells(dashboard_id, create_cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, **kwargs) # noqa: E501 + return data + + def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_cells_with_http_info(dashboard_id, create_cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. 
(required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_cells_prepare(dashboard_id, create_cell, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + """Create a dashboard cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param CreateCell create_cell: Cell that will be added (required) + :param str zap_trace_span: OpenTracing span context + :return: Cell + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_cells_prepare(dashboard_id, create_cell, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Cell', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_id_cells_prepare(self, dashboard_id, create_cell, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'create_cell', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards_id_cells', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `post_dashboards_id_cells`") # noqa: E501 + # verify the required parameter 'create_cell' is set + if ('create_cell' not in local_var_params or + local_var_params['create_cell'] is None): + raise ValueError("Missing the required parameter `create_cell` when calling `post_dashboards_id_cells`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'create_cell' in local_var_params: + body_params = local_var_params['create_cell'] + # HTTP header `Accept` + 
header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards_id_labels(self, dashboard_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_labels(dashboard_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_id_labels_with_http_info(dashboard_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_id_labels_with_http_info(dashboard_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_dashboards_id_labels_with_http_info(self, dashboard_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_labels_with_http_info(dashboard_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_labels_prepare(dashboard_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_id_labels_async(self, dashboard_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
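+
+ Illustrative sketch (assumes ``label_id`` references an existing label and
+ that LabelMapping accepts it via a ``label_id`` field):
+ >>> mapping = LabelMapping(label_id=label_id)
+ >>> label_response = await api.post_dashboards_id_labels_async(dashboard_id, mapping, _return_http_data_only=True)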
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_labels_prepare(dashboard_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_id_labels_prepare(self, dashboard_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards_id_labels', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `post_dashboards_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_dashboards_id_labels`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards_id_members(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_members(dashboard_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_id_members_with_http_info(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_id_members_with_http_info(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_dashboards_id_members_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_members_with_http_info(dashboard_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_members_prepare(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_id_members_async(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_members_prepare(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_id_members_prepare(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards_id_members', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `post_dashboards_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_dashboards_id_members`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dashboards_id_owners(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_owners(dashboard_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dashboards_id_owners_with_http_info(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_dashboards_id_owners_with_http_info(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_dashboards_id_owners_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a dashboard. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dashboards_id_owners_with_http_info(dashboard_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_owners_prepare(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dashboards_id_owners_async(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a dashboard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dashboards_id_owners_prepare(dashboard_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dashboards_id_owners_prepare(self, dashboard_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dashboards_id_owners', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `post_dashboards_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_dashboards_id_owners`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_dashboards_id_cells(dashboard_id, cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_dashboards_id_cells_with_http_info(dashboard_id, cell, **kwargs) # noqa: E501 + else: + (data) = self.put_dashboards_id_cells_with_http_info(dashboard_id, cell, **kwargs) # noqa: E501 + return data + + def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_dashboards_id_cells_with_http_info(dashboard_id, cell, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_dashboards_id_cells_prepare(dashboard_id, cell, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + """Replace cells in a dashboard. + + Replaces all cells in a dashboard. This is used primarily to update the positional information of all cells. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param list[Cell] cell: (required) + :param str zap_trace_span: OpenTracing span context + :return: Dashboard + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_dashboards_id_cells_prepare(dashboard_id, cell, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Dashboard', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_dashboards_id_cells_prepare(self, dashboard_id, cell, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_dashboards_id_cells', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `put_dashboards_id_cells`") # noqa: E501 + # verify the required parameter 'cell' is set + if ('cell' not in local_var_params or + local_var_params['cell'] is None): + raise ValueError("Missing the required parameter `cell` when calling `put_dashboards_id_cells`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'cell' in local_var_params: + body_params = local_var_params['cell'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/dbr_ps_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/dbr_ps_service.py new file mode 100644 index 0000000..4fb20ae --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/dbr_ps_service.py @@ -0,0 +1,695 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class DBRPsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """DBRPsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_dbrpid(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Delete a database retention policy. 
+ + Deletes the specified database retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dbrpid(dbrp_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping to delete. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_dbrpid_with_http_info(dbrp_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_dbrpid_with_http_info(dbrp_id, **kwargs) # noqa: E501 + return data + + def delete_dbrpid_with_http_info(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Delete a database retention policy. + + Deletes the specified database retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_dbrpid_with_http_info(dbrp_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping to delete. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dbrpid_prepare(dbrp_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_dbrpid_async(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Delete a database retention policy. + + Deletes the specified database retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping to delete. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name.
Specifies the organization that owns the DBRP mapping. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_dbrpid_prepare(dbrp_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_dbrpid_prepare(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dbrp_id', 'zap_trace_span', 'org_id', 'org'] # noqa: E501 + self._check_operation_params('delete_dbrpid', all_params, local_var_params) + # verify the required parameter 'dbrp_id' is set + if ('dbrp_id' not in local_var_params or + local_var_params['dbrp_id'] is None): + raise ValueError("Missing the required parameter `dbrp_id` when calling `delete_dbrpid`") # noqa: E501 + + path_params = {} + if 'dbrp_id' in local_var_params: + path_params['dbrpID'] = local_var_params['dbrp_id'] # noqa: E501 + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dbr_ps(self, **kwargs): # noqa: E501,D401,D403 + """List database retention policy mappings. + + Lists database retention policy (DBRP) mappings. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dbr_ps(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Only returns DBRP mappings for the specified organization. + :param str org: An organization name. Only returns DBRP mappings for the specified organization. + :param str id: A DBRP mapping ID. Only returns the specified DBRP mapping. + :param str bucket_id: A bucket ID. Only returns DBRP mappings that belong to the specified bucket. + :param bool default: Filters DBRP mappings by the value of their `default` property. + :param str db: A database. Only returns DBRP mappings that belong to the 1.x database. + :param str rp: A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp). Specifies the 1.x retention policy to filter on. + :return: DBRPs + If the method is called asynchronously, + returns the request thread.
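+
+ A hypothetical synchronous sketch (placeholder URL, token, org, and bucket ID; not values defined in this module):
+ >>> from influxdb_client import InfluxDBClient
+ >>> from influxdb_client.service.dbr_ps_service import DBRPsService
+ >>> with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ ...     service = DBRPsService(client.api_client)
+ ...     # List DBRP mappings scoped to one bucket of the organization.
+ ...     dbrps = service.get_dbr_ps(org="my-org", bucket_id="0bucketid000000")
+ ...     for mapping in dbrps.content:
+ ...         print(mapping.database, mapping.retention_policy)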
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dbr_ps_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_dbr_ps_with_http_info(**kwargs) # noqa: E501 + return data + + def get_dbr_ps_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List database retention policy mappings. + + Lists database retention policy (DBRP) mappings. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dbr_ps_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Only returns DBRP mappings for the specified organization. + :param str org: An organization name. Only returns DBRP mappings for the specified organization. + :param str id: A DBPR mapping ID. Only returns the specified DBRP mapping. + :param str bucket_id: A bucket ID. Only returns DBRP mappings that belong to the specified bucket. + :param bool default: Specifies filtering on default + :param str db: A database. Only returns DBRP mappings that belong to the 1.x database. + :param str rp: A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp). Specifies the 1.x retention policy to filter on. + :return: DBRPs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dbr_ps_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dbrps', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dbr_ps_async(self, **kwargs): # noqa: E501,D401,D403 + """List database retention policy mappings. + + Lists database retention policy (DBRP) mappings. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Only returns DBRP mappings for the specified organization. + :param str org: An organization name. Only returns DBRP mappings for the specified organization. + :param str id: A DBPR mapping ID. Only returns the specified DBRP mapping. + :param str bucket_id: A bucket ID. Only returns DBRP mappings that belong to the specified bucket. + :param bool default: Specifies filtering on default + :param str db: A database. Only returns DBRP mappings that belong to the 1.x database. + :param str rp: A [retention policy](https://docs.influxdata.com/influxdb/v1.8/concepts/glossary/#retention-policy-rp). Specifies the 1.x retention policy to filter on. + :return: DBRPs + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dbr_ps_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dbrps', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dbr_ps_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'org_id', 'org', 'id', 'bucket_id', 'default', 'db', 'rp'] # noqa: E501 + self._check_operation_params('get_dbr_ps', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'id' in local_var_params: + query_params.append(('id', local_var_params['id'])) # noqa: E501 + if 'bucket_id' in local_var_params: + query_params.append(('bucketID', local_var_params['bucket_id'])) # noqa: E501 + if 'default' in local_var_params: + query_params.append(('default', local_var_params['default'])) # noqa: E501 + if 'db' in local_var_params: + query_params.append(('db', local_var_params['db'])) # noqa: E501 + if 'rp' in local_var_params: + query_params.append(('rp', local_var_params['rp'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_dbr_ps_id(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a database retention policy mapping. + + Retrieves the specified retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dbr_ps_id(dbrp_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dbr_ps_id_with_http_info(dbrp_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dbr_ps_id_with_http_info(dbrp_id, **kwargs) # noqa: E501 + return data + + def get_dbr_ps_id_with_http_info(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a database retention policy mapping. 
+ + Retrieves the specified retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dbr_ps_id_with_http_info(dbrp_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dbr_ps_id_prepare(dbrp_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPGet', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dbr_ps_id_async(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a database retention policy mapping. + + Retrieves the specified retention policy (DBRP) mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. 
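+
+ A hypothetical async sketch (placeholder URL, token, org, and IDs; the async client and its `api_client` attribute are assumptions about this package, not values from this module):
+ >>> import asyncio
+ >>> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+ >>> from influxdb_client.service.dbr_ps_service import DBRPsService
+ >>> async def fetch_mapping():
+ ...     async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ ...         service = DBRPsService(client.api_client)
+ ...         # The response wraps the mapping in a `content` property.
+ ...         mapping = await service.get_dbr_ps_id_async("0dbrpid00000000", org_id="0orgid000000000")
+ ...         print(mapping.content.database)
+ >>> asyncio.run(fetch_mapping())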
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dbr_ps_id_prepare(dbrp_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPGet', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dbr_ps_id_prepare(self, dbrp_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dbrp_id', 'zap_trace_span', 'org_id', 'org'] # noqa: E501 + self._check_operation_params('get_dbr_ps_id', all_params, local_var_params) + # verify the required parameter 'dbrp_id' is set + if ('dbrp_id' not in local_var_params or + local_var_params['dbrp_id'] is None): + raise ValueError("Missing the required parameter `dbrp_id` when calling `get_dbr_ps_id`") # noqa: E501 + + path_params = {} + if 'dbrp_id' in local_var_params: + path_params['dbrpID'] = local_var_params['dbrp_id'] # noqa: E501 + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dbrpid(self, dbrp_id, dbrp_update, **kwargs): # noqa: E501,D401,D403 + """Update a database retention policy mapping. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dbrpid(dbrp_id, dbrp_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param DBRPUpdate dbrp_update: Updates the database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to modify the _retention policy_ (`retention_policy` property) of a DBRP mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dbrpid_with_http_info(dbrp_id, dbrp_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_dbrpid_with_http_info(dbrp_id, dbrp_update, **kwargs) # noqa: E501 + return data + + def patch_dbrpid_with_http_info(self, dbrp_id, dbrp_update, **kwargs): # noqa: E501,D401,D403 + """Update a database retention policy mapping. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dbrpid_with_http_info(dbrp_id, dbrp_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param DBRPUpdate dbrp_update: Updates the database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to modify the _retention policy_ (`retention_policy` property) of a DBRP mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dbrpid_prepare(dbrp_id, dbrp_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPGet', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dbrpid_async(self, dbrp_id, dbrp_update, **kwargs): # noqa: E501,D401,D403 + """Update a database retention policy mapping. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dbrp_id: A DBRP mapping ID. Specifies the DBRP mapping. (required) + :param DBRPUpdate dbrp_update: Updates the database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to modify the _retention policy_ (`retention_policy` property) of a DBRP mapping. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) (required) + :param str zap_trace_span: OpenTracing span context + :param str org_id: An organization ID. Specifies the organization that owns the DBRP mapping. + :param str org: An organization name. Specifies the organization that owns the DBRP mapping. + :return: DBRPGet + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dbrpid_prepare(dbrp_id, dbrp_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dbrps/{dbrpID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRPGet', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dbrpid_prepare(self, dbrp_id, dbrp_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dbrp_id', 'dbrp_update', 'zap_trace_span', 'org_id', 'org'] # noqa: E501 + self._check_operation_params('patch_dbrpid', all_params, local_var_params) + # verify the required parameter 'dbrp_id' is set + if ('dbrp_id' not in local_var_params or + local_var_params['dbrp_id'] is None): + raise ValueError("Missing the required parameter `dbrp_id` when calling `patch_dbrpid`") # noqa: E501 + # verify the required parameter 'dbrp_update' is set + if ('dbrp_update' not in local_var_params or + local_var_params['dbrp_update'] is None): + raise ValueError("Missing the required parameter `dbrp_update` when calling `patch_dbrpid`") # noqa: E501 + + path_params = {} + if 'dbrp_id' in local_var_params: + path_params['dbrpID'] = local_var_params['dbrp_id'] # noqa: E501 + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'dbrp_update' in local_var_params: + body_params = local_var_params['dbrp_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_dbrp(self, dbrp_create, **kwargs): # noqa: E501,D401,D403 + """Add a database retention policy mapping. + + Creates a database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to add InfluxDB 1.x API compatibility to your InfluxDB Cloud or InfluxDB OSS 2.x buckets. Your buckets must contain a DBRP mapping in order to query and write using the InfluxDB 1.x API. object. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dbrp(dbrp_create, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param DBRPCreate dbrp_create: The database retention policy mapping to add. Note that _`retention_policy`_ is a required parameter in the request body. 
The value of _`retention_policy`_ can be any arbitrary `string` name or value, with the default value commonly set as `autogen`. The value of _`retention_policy`_ isn't a [retention_policy](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-policy-rp) (required) + :param str zap_trace_span: OpenTracing span context + :return: DBRP + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_dbrp_with_http_info(dbrp_create, **kwargs) # noqa: E501 + else: + (data) = self.post_dbrp_with_http_info(dbrp_create, **kwargs) # noqa: E501 + return data + + def post_dbrp_with_http_info(self, dbrp_create, **kwargs): # noqa: E501,D401,D403 + """Add a database retention policy mapping. + + Creates a database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to add InfluxDB 1.x API compatibility to your InfluxDB Cloud or InfluxDB OSS 2.x buckets. Your buckets must contain a DBRP mapping in order to query and write using the InfluxDB 1.x API. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_dbrp_with_http_info(dbrp_create, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param DBRPCreate dbrp_create: The database retention policy mapping to add. Note that _`retention_policy`_ is a required parameter in the request body. The value of _`retention_policy`_ can be any arbitrary `string` name or value, with the default value commonly set as `autogen`. The value of _`retention_policy`_ isn't a [retention_policy](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-policy-rp) (required) + :param str zap_trace_span: OpenTracing span context + :return: DBRP + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dbrp_prepare(dbrp_create, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dbrps', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRP', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_dbrp_async(self, dbrp_create, **kwargs): # noqa: E501,D401,D403 + """Add a database retention policy mapping. + + Creates a database retention policy (DBRP) mapping and returns the mapping. Use this endpoint to add InfluxDB 1.x API compatibility to your InfluxDB Cloud or InfluxDB OSS 2.x buckets. Your buckets must contain a DBRP mapping in order to query and write using the InfluxDB 1.x API. #### Related guide - [Database and retention policy mapping](https://docs.influxdata.com/influxdb/latest/reference/api/influxdb-1x/dbrp/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param DBRPCreate dbrp_create: The database retention policy mapping to add.
Note that _`retention_policy`_ is a required parameter in the request body. The value of _`retention_policy`_ can be any arbitrary `string` name or value, with the default value commonly set as `autogen`. The value of _`retention_policy`_ isn't a [retention_policy](https://docs.influxdata.com/influxdb/latest/reference/glossary/#retention-policy-rp) (required) + :param str zap_trace_span: OpenTracing span context + :return: DBRP + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_dbrp_prepare(dbrp_create, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dbrps', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='DBRP', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_dbrp_prepare(self, dbrp_create, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dbrp_create', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_dbrp', all_params, local_var_params) + # verify the required parameter 'dbrp_create' is set + if ('dbrp_create' not in local_var_params or + local_var_params['dbrp_create'] is None): + raise ValueError("Missing the required parameter `dbrp_create` when calling `post_dbrp`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'dbrp_create' in local_var_params: + body_params = local_var_params['dbrp_create'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/delete_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/delete_service.py new file mode 100644 index 0000000..7fef4ea --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/delete_service.py @@ -0,0 +1,173 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class DeleteService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """DeleteService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def post_delete(self, delete_predicate_request, **kwargs): # noqa: E501,D401,D403 + """Delete data. + + Deletes data from a bucket. Use this endpoint to delete points from a bucket in a specified time range. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the delete asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). - Learn how InfluxDB handles [deleted tags](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementtagkeys/) and [deleted fields](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementfieldkeys/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_delete(delete_predicate_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param DeletePredicateRequest delete_predicate_request: Time range parameters and an optional **delete predicate expression**. To select points to delete within the specified time range, pass a **delete predicate expression** in the `predicate` property of the request body. If you don't pass a `predicate`, InfluxDB deletes all data with timestamps in the specified time range. #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). (required) + :param str zap_trace_span: OpenTracing span context + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket: A bucket name or ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. 
- Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket_id: A bucket ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_delete_with_http_info(delete_predicate_request, **kwargs) # noqa: E501 + else: + (data) = self.post_delete_with_http_info(delete_predicate_request, **kwargs) # noqa: E501 + return data + + def post_delete_with_http_info(self, delete_predicate_request, **kwargs): # noqa: E501,D401,D403 + """Delete data. + + Deletes data from a bucket. Use this endpoint to delete points from a bucket in a specified time range. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the delete asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). - Learn how InfluxDB handles [deleted tags](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementtagkeys/) and [deleted fields](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementfieldkeys/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_delete_with_http_info(delete_predicate_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param DeletePredicateRequest delete_predicate_request: Time range parameters and an optional **delete predicate expression**. To select points to delete within the specified time range, pass a **delete predicate expression** in the `predicate` property of the request body. If you don't pass a `predicate`, InfluxDB deletes all data with timestamps in the specified time range. #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). (required) + :param str zap_trace_span: OpenTracing span context + :param str org: An organization name or ID. 
#### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket: A bucket name or ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket_id: A bucket ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_delete_prepare(delete_predicate_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/delete', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_delete_async(self, delete_predicate_request, **kwargs): # noqa: E501,D401,D403 + """Delete data. + + Deletes data from a bucket. Use this endpoint to delete points from a bucket in a specified time range. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the delete asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). 
- Learn how InfluxDB handles [deleted tags](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementtagkeys/) and [deleted fields](https://docs.influxdata.com/flux/v0.x/stdlib/influxdata/influxdb/schema/measurementfieldkeys/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param DeletePredicateRequest delete_predicate_request: Time range parameters and an optional **delete predicate expression**. To select points to delete within the specified time range, pass a **delete predicate expression** in the `predicate` property of the request body. If you don't pass a `predicate`, InfluxDB deletes all data with timestamps in the specified time range. #### Related guides - [Delete data](https://docs.influxdata.com/influxdb/latest/write-data/delete-data/) - Learn how to use [delete predicate syntax](https://docs.influxdata.com/influxdb/latest/reference/syntax/delete-predicate/). (required) + :param str zap_trace_span: OpenTracing span context + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket: A bucket name or ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Deletes data from the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - Deletes data from the bucket in the specified organization. - If you pass both `orgID` and `org`, they must both be valid. + :param str bucket_id: A bucket ID. Specifies the bucket to delete data from. If you pass both `bucket` and `bucketID`, `bucketID` takes precedence. + :return: None + If the method is called asynchronously, + returns the request thread. 
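+
+ A hypothetical async sketch (placeholder URL, token, org, bucket, time range, and predicate; not values defined in this module):
+ >>> import asyncio
+ >>> from influxdb_client import DeletePredicateRequest
+ >>> from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync
+ >>> from influxdb_client.service.delete_service import DeleteService
+ >>> async def delete_cpu_points():
+ ...     async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org") as client:
+ ...         service = DeleteService(client.api_client)
+ ...         # Delete one day of points from the "cpu" measurement only.
+ ...         predicate = DeletePredicateRequest(start="2023-01-01T00:00:00Z",
+ ...                                            stop="2023-01-02T00:00:00Z",
+ ...                                            predicate='_measurement="cpu"')
+ ...         await service.post_delete_async(predicate, org="my-org", bucket="my-bucket")
+ >>> asyncio.run(delete_cpu_points())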
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_delete_prepare(delete_predicate_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/delete', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_delete_prepare(self, delete_predicate_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['delete_predicate_request', 'zap_trace_span', 'org', 'bucket', 'org_id', 'bucket_id'] # noqa: E501 + self._check_operation_params('post_delete', all_params, local_var_params) + # verify the required parameter 'delete_predicate_request' is set + if ('delete_predicate_request' not in local_var_params or + local_var_params['delete_predicate_request'] is None): + raise ValueError("Missing the required parameter `delete_predicate_request` when calling `post_delete`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'bucket' in local_var_params: + query_params.append(('bucket', local_var_params['bucket'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'bucket_id' in local_var_params: + query_params.append(('bucketID', local_var_params['bucket_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'delete_predicate_request' in local_var_params: + body_params = local_var_params['delete_predicate_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/health_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/health_service.py new file mode 100644 index 0000000..6a87da7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/health_service.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class HealthService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """HealthService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_health(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the health of the instance. + + Returns the health of the instance. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_health(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_health_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_health_with_http_info(**kwargs) # noqa: E501 + return data + + def get_health_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the health of the instance. + + Returns the health of the instance. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_health_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_health_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/health', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='HealthCheck', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_health_async(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the health of the instance. + + Returns the health of the instance. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_health_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/health', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='HealthCheck', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_health_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_health', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/invokable_scripts_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/invokable_scripts_service.py new file mode 100644 index 0000000..9c84503 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/invokable_scripts_service.py @@ -0,0 +1,922 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class InvokableScriptsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """InvokableScriptsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_scripts_id(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Delete a script. + + Deletes a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) and all associated records. #### Limitations - You can delete only one script per request. - If the script ID you provide doesn't exist for the organization, InfluxDB responds with an HTTP `204` status code. #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scripts_id(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Deletes the specified script. (required) + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501 + return data + + def delete_scripts_id_with_http_info(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Delete a script. + + Deletes a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) and all associated records. #### Limitations - You can delete only one script per request. - If the script ID you provide doesn't exist for the organization, InfluxDB responds with an HTTP `204` status code. #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scripts_id_with_http_info(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Deletes the specified script. (required) + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scripts_id_prepare(script_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_scripts_id_async(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Delete a script. + + Deletes a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) and all associated records. #### Limitations - You can delete only one script per request. - If the script ID you provide doesn't exist for the organization, InfluxDB responds with an HTTP `204` status code. #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str script_id: A script ID. Deletes the specified script. (required) + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scripts_id_prepare(script_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_scripts_id_prepare(self, script_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_id'] # noqa: E501 + self._check_operation_params('delete_scripts_id', all_params, local_var_params) + # verify the required parameter 'script_id' is set + if ('script_id' not in local_var_params or + local_var_params['script_id'] is None): + raise ValueError("Missing the required parameter `script_id` when calling `delete_scripts_id`") # noqa: E501 + + path_params = {} + if 'script_id' in local_var_params: + path_params['scriptID'] = local_var_params['script_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scripts(self, **kwargs): # noqa: E501,D401,D403 + """List scripts. + + Lists [scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination]({{% INFLUXDB_DOCS_URL %}}/api/#tag/Pagination). + :param int limit: The maximum number of scripts to return. Default is `100`. + :param str name: The script name. Lists scripts with the specified name. + :return: Scripts + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scripts_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_scripts_with_http_info(**kwargs) # noqa: E501 + return data + + def get_scripts_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List scripts. + + Lists [scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination]({{% INFLUXDB_DOCS_URL %}}/api/#tag/Pagination). + :param int limit: The maximum number of scripts to return. Default is `100`. + :param str name: The script name. Lists scripts with the specified name. + :return: Scripts + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Scripts', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scripts_async(self, **kwargs): # noqa: E501,D401,D403 + """List scripts. + + Lists [scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination]({{% INFLUXDB_DOCS_URL %}}/api/#tag/Pagination). + :param int limit: The maximum number of scripts to return. Default is `100`. + :param str name: The script name. Lists scripts with the specified name. + :return: Scripts + If the method is called asynchronously, + returns the request thread. 
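+
+ A paging sketch with illustrative parameter values (reuses the
+ `scripts_service` placeholder from the example above):
+
+ >>> scripts = await scripts_service.get_scripts_async(limit=50, offset=0)
+ >>> [s.name for s in scripts.scripts]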
+ """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Scripts', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scripts_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['offset', 'limit', 'name'] # noqa: E501 + self._check_operation_params('get_scripts', all_params, local_var_params) + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_scripts`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 500: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_scripts`, must be a value less than or equal to `500`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_scripts`, must be a value greater than or equal to `0`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scripts_id(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a script. + + Retrieves a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts_id(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Retrieves the specified script. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501 + return data + + def get_scripts_id_with_http_info(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a script. + + Retrieves a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts_id_with_http_info(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Retrieves the specified script. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_id_prepare(script_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scripts_id_async(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a script. + + Retrieves a [script](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/). #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str script_id: A script ID. Retrieves the specified script. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_id_prepare(script_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scripts_id_prepare(self, script_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_id'] # noqa: E501 + self._check_operation_params('get_scripts_id', all_params, local_var_params) + # verify the required parameter 'script_id' is set + if ('script_id' not in local_var_params or + local_var_params['script_id'] is None): + raise ValueError("Missing the required parameter `script_id` when calling `get_scripts_id`") # noqa: E501 + + path_params = {} + if 'script_id' in local_var_params: + path_params['scriptID'] = local_var_params['script_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scripts_id_params(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Find script parameters.. + + Analyzes a script and determines required parameters. Find all `params` keys referenced in a script and return a list of keys. If it is possible to determine the type of the value from the context then the type is also returned -- for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` Requesting the parameters using `GET /api/v2/scripts/SCRIPT_ID/params` returns the following: ```json { "params": { "mybucket": "string" } } ``` The type name returned for a parameter will be one of: - `any` - `bool` - `duration` - `float` - `int` - `string` - `time` - `uint` The type name `any` is used when the type of a parameter cannot be determined from the context, or the type is determined to be a structured type such as an array or record. #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts_id_params(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. The script to analyze for params. (required) + :return: Params + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scripts_id_params_with_http_info(script_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scripts_id_params_with_http_info(script_id, **kwargs) # noqa: E501 + return data + + def get_scripts_id_params_with_http_info(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Find script parameters.. + + Analyzes a script and determines required parameters. Find all `params` keys referenced in a script and return a list of keys. If it is possible to determine the type of the value from the context then the type is also returned -- for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` Requesting the parameters using `GET /api/v2/scripts/SCRIPT_ID/params` returns the following: ```json { "params": { "mybucket": "string" } } ``` The type name returned for a parameter will be one of: - `any` - `bool` - `duration` - `float` - `int` - `string` - `time` - `uint` The type name `any` is used when the type of a parameter cannot be determined from the context, or the type is determined to be a structured type such as an array or record. #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scripts_id_params_with_http_info(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. The script to analyze for params. (required) + :return: Params + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_id_params_prepare(script_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts/{scriptID}/params', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Params', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scripts_id_params_async(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Find script parameters.. + + Analyzes a script and determines required parameters. Find all `params` keys referenced in a script and return a list of keys. 
If it is possible to determine the type of the value from the context then the type is also returned -- for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` Requesting the parameters using `GET /api/v2/scripts/SCRIPT_ID/params` returns the following: ```json { "params": { "mybucket": "string" } } ``` The type name returned for a parameter will be one of: - `any` - `bool` - `duration` - `float` - `int` - `string` - `time` - `uint` The type name `any` is used when the type of a parameter cannot be determined from the context, or the type is determined to be a structured type such as an array or record. #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str script_id: A script ID. The script to analyze for params. (required) + :return: Params + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scripts_id_params_prepare(script_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts/{scriptID}/params', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Params', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scripts_id_params_prepare(self, script_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_id'] # noqa: E501 + self._check_operation_params('get_scripts_id_params', all_params, local_var_params) + # verify the required parameter 'script_id' is set + if ('script_id' not in local_var_params or + local_var_params['script_id'] is None): + raise ValueError("Missing the required parameter `script_id` when calling `get_scripts_id_params`") # noqa: E501 + + path_params = {} + if 'script_id' in local_var_params: + path_params['scriptID'] = local_var_params['script_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_scripts_id(self, script_id, script_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a script. + + Updates an invokable script. Use this endpoint to modify values for script properties (`description` and `script`). To update a script, pass an object that contains the updated key-value pairs. #### Limitations - If you send an empty request body, the script will neither update nor store an empty script, but InfluxDB will respond with an HTTP `200` status code. 
#### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_scripts_id(script_id, script_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Updates the specified script. (required) + :param ScriptUpdateRequest script_update_request: An object that contains the updated script properties to apply. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_scripts_id_with_http_info(script_id, script_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_scripts_id_with_http_info(script_id, script_update_request, **kwargs) # noqa: E501 + return data + + def patch_scripts_id_with_http_info(self, script_id, script_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a script. + + Updates an invokable script. Use this endpoint to modify values for script properties (`description` and `script`). To update a script, pass an object that contains the updated key-value pairs. #### Limitations - If you send an empty request body, the script will neither update nor store an empty script, but InfluxDB will respond with an HTTP `200` status code. #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_scripts_id_with_http_info(script_id, script_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Updates the specified script. (required) + :param ScriptUpdateRequest script_update_request: An object that contains the updated script properties to apply. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_scripts_id_prepare(script_id, script_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_scripts_id_async(self, script_id, script_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a script. + + Updates an invokable script. Use this endpoint to modify values for script properties (`description` and `script`). To update a script, pass an object that contains the updated key-value pairs. 
#### Limitations - If you send an empty request body, the script will neither update nor store an empty script, but InfluxDB will respond with an HTTP `200` status code. #### Related Guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str script_id: A script ID. Updates the specified script. (required) + :param ScriptUpdateRequest script_update_request: An object that contains the updated script properties to apply. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_scripts_id_prepare(script_id, script_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts/{scriptID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_scripts_id_prepare(self, script_id, script_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_id', 'script_update_request'] # noqa: E501 + self._check_operation_params('patch_scripts_id', all_params, local_var_params) + # verify the required parameter 'script_id' is set + if ('script_id' not in local_var_params or + local_var_params['script_id'] is None): + raise ValueError("Missing the required parameter `script_id` when calling `patch_scripts_id`") # noqa: E501 + # verify the required parameter 'script_update_request' is set + if ('script_update_request' not in local_var_params or + local_var_params['script_update_request'] is None): + raise ValueError("Missing the required parameter `script_update_request` when calling `patch_scripts_id`") # noqa: E501 + + path_params = {} + if 'script_id' in local_var_params: + path_params['scriptID'] = local_var_params['script_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + if 'script_update_request' in local_var_params: + body_params = local_var_params['script_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scripts(self, script_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a script. + + Creates an [invokable script](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) and returns the script. 
#### Related guides - [Invokable scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) - [Creating custom InfluxDB endpoints](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scripts(script_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ScriptCreateRequest script_create_request: The script to create. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scripts_with_http_info(script_create_request, **kwargs) # noqa: E501 + else: + (data) = self.post_scripts_with_http_info(script_create_request, **kwargs) # noqa: E501 + return data + + def post_scripts_with_http_info(self, script_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a script. + + Creates an [invokable script](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) and returns the script. #### Related guides - [Invokable scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) - [Creating custom InfluxDB endpoints](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scripts_with_http_info(script_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ScriptCreateRequest script_create_request: The script to create. (required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scripts_prepare(script_create_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scripts_async(self, script_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a script. + + Creates an [invokable script](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) and returns the script. #### Related guides - [Invokable scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) - [Creating custom InfluxDB endpoints](https://docs.influxdata.com/resources/videos/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param ScriptCreateRequest script_create_request: The script to create. 
(required) + :return: Script + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scripts_prepare(script_create_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Script', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scripts_prepare(self, script_create_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_create_request'] # noqa: E501 + self._check_operation_params('post_scripts', all_params, local_var_params) + # verify the required parameter 'script_create_request' is set + if ('script_create_request' not in local_var_params or + local_var_params['script_create_request'] is None): + raise ValueError("Missing the required parameter `script_create_request` when calling `post_scripts`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'script_create_request' in local_var_params: + body_params = local_var_params['script_create_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scripts_id_invoke(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Invoke a script. + + Runs a script and returns the result. When the script runs, InfluxDB replaces `params` keys referenced in the script with `params` key-values passed in the request body--for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` The following example `POST /api/v2/scripts/SCRIPT_ID/invoke` request body passes a value for the _`mybucket`_ parameter: ```json { "params": { "mybucket": "air_sensor" } } ``` #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scripts_id_invoke(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Runs the specified script. (required) + :param ScriptInvocationParams script_invocation_params: + :return: file + If the method is called asynchronously, + returns the request thread. 
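+
+ An invocation sketch that passes the `mybucket` parameter from the
+ example above (assumes `ScriptInvocationParams` is exported by
+ `influxdb_client`; the script ID is a placeholder):
+
+ >>> from influxdb_client import ScriptInvocationParams
+ >>> result = scripts_service.post_scripts_id_invoke(
+ ... "09afa3b220fe4000",
+ ... script_invocation_params=ScriptInvocationParams(params={"mybucket": "air_sensor"}))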
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scripts_id_invoke_with_http_info(script_id, **kwargs) # noqa: E501 + else: + (data) = self.post_scripts_id_invoke_with_http_info(script_id, **kwargs) # noqa: E501 + return data + + def post_scripts_id_invoke_with_http_info(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Invoke a script. + + Runs a script and returns the result. When the script runs, InfluxDB replaces `params` keys referenced in the script with `params` key-values passed in the request body--for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` The following example `POST /api/v2/scripts/SCRIPT_ID/invoke` request body passes a value for the _`mybucket`_ parameter: ```json { "params": { "mybucket": "air_sensor" } } ``` #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scripts_id_invoke_with_http_info(script_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str script_id: A script ID. Runs the specified script. (required) + :param ScriptInvocationParams script_invocation_params: + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not self._is_cloud_instance(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scripts_id_invoke_prepare(script_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scripts/{scriptID}/invoke', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scripts_id_invoke_async(self, script_id, **kwargs): # noqa: E501,D401,D403 + """Invoke a script. + + Runs a script and returns the result. When the script runs, InfluxDB replaces `params` keys referenced in the script with `params` key-values passed in the request body--for example: The following sample script contains a _`mybucket`_ parameter : ```json "script": "from(bucket: params.mybucket) |> range(start: -7d) |> limit(n:1)" ``` The following example `POST /api/v2/scripts/SCRIPT_ID/invoke` request body passes a value for the _`mybucket`_ parameter: ```json { "params": { "mybucket": "air_sensor" } } ``` #### Related guides - [Invoke custom scripts](https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str script_id: A script ID. Runs the specified script. 
(required) + :param ScriptInvocationParams script_invocation_params: + :return: file + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + if not await self._is_cloud_instance_async(): + from influxdb_client.client.warnings import CloudOnlyWarning + CloudOnlyWarning.print_warning('InvokableScriptsService', + 'https://docs.influxdata.com/influxdb/cloud/api-guide/api-invokable-scripts/') # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scripts_id_invoke_prepare(script_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scripts/{scriptID}/invoke', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='file', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scripts_id_invoke_prepare(self, script_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['script_id', 'script_invocation_params'] # noqa: E501 + self._check_operation_params('post_scripts_id_invoke', all_params, local_var_params) + # verify the required parameter 'script_id' is set + if ('script_id' not in local_var_params or + local_var_params['script_id'] is None): + raise ValueError("Missing the required parameter `script_id` when calling `post_scripts_id_invoke`") # noqa: E501 + + path_params = {} + if 'script_id' in local_var_params: + path_params['scriptID'] = local_var_params['script_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + if 'script_invocation_params' in local_var_params: + body_params = local_var_params['script_invocation_params'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['text/csv', 'application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/labels_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/labels_service.py new file mode 100644 index 0000000..aa88f41 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/labels_service.py @@ -0,0 +1,618 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class LabelsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
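+
+ A hedged construction sketch (reuses the api client of an existing
+ InfluxDBClient, as in the earlier HealthService example; names are
+ placeholders):
+
+ >>> labels_service = LabelsService(client.api_client)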
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """LabelsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_labels_id(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_labels_id(label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_labels_id_with_http_info(label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_labels_id_with_http_info(label_id, **kwargs) # noqa: E501 + return data + + def delete_labels_id_with_http_info(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_labels_id_with_http_info(label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_labels_id_prepare(label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_labels_id_async(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_labels_id_prepare(label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_labels_id_prepare(self, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_labels_id', all_params, local_var_params) + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_labels_id`") # noqa: E501 + + path_params = {} + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_labels(self, **kwargs): # noqa: E501,D401,D403 + """List all labels. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_labels(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID. + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_labels_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_labels_with_http_info(**kwargs) # noqa: E501 + return data + + def get_labels_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all labels. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_labels_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID. + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_labels_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_labels_async(self, **kwargs): # noqa: E501,D401,D403 + """List all labels. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID. + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_labels_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_labels_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'org_id'] # noqa: E501 + self._check_operation_params('get_labels', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_labels_id(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a label. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_labels_id(label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_labels_id_with_http_info(label_id, **kwargs) # noqa: E501 + else: + (data) = self.get_labels_id_with_http_info(label_id, **kwargs) # noqa: E501 + return data + + def get_labels_id_with_http_info(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a label. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_labels_id_with_http_info(label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_labels_id_prepare(label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/labels/{labelID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_labels_id_async(self, label_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a label. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_labels_id_prepare(label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/labels/{labelID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_labels_id_prepare(self, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_labels_id', all_params, local_var_params) + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `get_labels_id`") # noqa: E501 + + path_params = {} + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_labels_id(self, label_id, label_update, **kwargs): # noqa: E501,D401,D403 + """Update a label. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_labels_id(label_id, label_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param LabelUpdate label_update: A label update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_labels_id_with_http_info(label_id, label_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_labels_id_with_http_info(label_id, label_update, **kwargs) # noqa: E501 + return data + + def patch_labels_id_with_http_info(self, label_id, label_update, **kwargs): # noqa: E501,D401,D403 + """Update a label. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_labels_id_with_http_info(label_id, label_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param LabelUpdate label_update: A label update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_labels_id_prepare(label_id, label_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/labels/{labelID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_labels_id_async(self, label_id, label_update, **kwargs): # noqa: E501,D401,D403 + """Update a label. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str label_id: The ID of the label to update. (required) + :param LabelUpdate label_update: A label update. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_labels_id_prepare(label_id, label_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/labels/{labelID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_labels_id_prepare(self, label_id, label_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['label_id', 'label_update', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_labels_id', all_params, local_var_params) + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `patch_labels_id`") # noqa: E501 + # verify the required parameter 'label_update' is set + if ('label_update' not in local_var_params or + local_var_params['label_update'] is None): + raise ValueError("Missing the required parameter `label_update` when calling `patch_labels_id`") # noqa: E501 + + path_params = {} + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_update' in local_var_params: + body_params = local_var_params['label_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_labels(self, label_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a label. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_labels(label_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param LabelCreateRequest label_create_request: The label to create. (required) + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_labels_with_http_info(label_create_request, **kwargs) # noqa: E501 + else: + (data) = self.post_labels_with_http_info(label_create_request, **kwargs) # noqa: E501 + return data + + def post_labels_with_http_info(self, label_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a label. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_labels_with_http_info(label_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param LabelCreateRequest label_create_request: The label to create. (required) + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_labels_prepare(label_create_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_labels_async(self, label_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a label. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param LabelCreateRequest label_create_request: The label to create. (required) + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_labels_prepare(label_create_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_labels_prepare(self, label_create_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['label_create_request'] # noqa: E501 + self._check_operation_params('post_labels', all_params, local_var_params) + # verify the required parameter 'label_create_request' is set + if ('label_create_request' not in local_var_params or + local_var_params['label_create_request'] is None): + raise ValueError("Missing the required parameter `label_create_request` when calling `post_labels`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'label_create_request' in local_var_params: + body_params = local_var_params['label_create_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/legacy_authorizations_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/legacy_authorizations_service.py new file mode 100644 
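The LabelsService generated above follows the calling convention every service in this diff shares: a plain call blocks and returns the deserialized model, async_req=True returns a thread-like handle whose .get() yields the result, and each *_async coroutine awaits the request directly. A minimal usage sketch, not part of the generated files, assuming the influxdb_client package these modules belong to (the URL, token, org, and IDs below are placeholders):

    # Usage sketch; swap in a real URL, token, org, and org ID.
    from influxdb_client import InfluxDBClient, LabelCreateRequest
    from influxdb_client.service.labels_service import LabelsService

    client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
    labels_service = LabelsService(api_client=client.api_client)

    # Synchronous call: returns a LabelsResponse whose .labels holds the list.
    labels = labels_service.get_labels()

    # POST /api/v2/labels with a JSON body built from the request model.
    created = labels_service.post_labels(
        LabelCreateRequest(org_id="0000000000000001",  # placeholder org ID
                           name="my-label",
                           properties={"color": "ffb3b3"}))

    # async_req=True returns a thread-like handle; .get() blocks for the result.
    thread = labels_service.get_labels(async_req=True)
    labels_again = thread.get()

    client.close()
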
index 0000000..622ef49 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/legacy_authorizations_service.py @@ -0,0 +1,777 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class LegacyAuthorizationsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """LegacyAuthorizationsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_legacy_authorizations_id(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_legacy_authorizations_id(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_legacy_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_legacy_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + return data + + def delete_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_legacy_authorizations_id_with_http_info(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_legacy_authorizations_id_prepare(auth_id, **kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_legacy_authorizations_id_async(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Delete a legacy authorization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to delete. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_legacy_authorizations_id_prepare(auth_id, **kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_legacy_authorizations_id_prepare(self, auth_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_legacy_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `delete_legacy_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_legacy_authorizations(self, **kwargs): # noqa: E501,D401,D403 + """List all legacy authorizations. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_legacy_authorizations(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: Only show legacy authorizations that belong to a user ID. + :param str user: Only show legacy authorizations that belong to a user name. + :param str org_id: Only show legacy authorizations that belong to an organization ID. + :param str org: Only show legacy authorizations that belong to an organization name. + :param str token: Only show legacy authorizations with a specified token (auth name). + :param str auth_id: Only show legacy authorizations with a specified auth ID. + :return: Authorizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_legacy_authorizations_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_legacy_authorizations_with_http_info(**kwargs) # noqa: E501 + return data + + def get_legacy_authorizations_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all legacy authorizations. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_legacy_authorizations_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: Only show legacy authorizations that belong to a user ID. + :param str user: Only show legacy authorizations that belong to a user name. + :param str org_id: Only show legacy authorizations that belong to an organization ID. + :param str org: Only show legacy authorizations that belong to an organization name. + :param str token: Only show legacy authorizations with a specified token (auth name). + :param str auth_id: Only show legacy authorizations with a specified auth ID. + :return: Authorizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_legacy_authorizations_prepare(**kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_legacy_authorizations_async(self, **kwargs): # noqa: E501,D401,D403 + """List all legacy authorizations. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str user_id: Only show legacy authorizations that belong to a user ID. + :param str user: Only show legacy authorizations that belong to a user name. + :param str org_id: Only show legacy authorizations that belong to an organization ID. + :param str org: Only show legacy authorizations that belong to an organization name. + :param str token: Only show legacy authorizations with a specified token (auth name). + :param str auth_id: Only show legacy authorizations with a specified auth ID. + :return: Authorizations + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_legacy_authorizations_prepare(**kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_legacy_authorizations_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'user_id', 'user', 'org_id', 'org', 'token', 'auth_id'] # noqa: E501 + self._check_operation_params('get_legacy_authorizations', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'user_id' in local_var_params: + query_params.append(('userID', local_var_params['user_id'])) # noqa: E501 + if 'user' in local_var_params: + query_params.append(('user', local_var_params['user'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'token' in local_var_params: + query_params.append(('token', local_var_params['token'])) # noqa: E501 + if 'auth_id' in local_var_params: + query_params.append(('authID', local_var_params['auth_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_legacy_authorizations_id(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_legacy_authorizations_id(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to get. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_legacy_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + else: + (data) = self.get_legacy_authorizations_id_with_http_info(auth_id, **kwargs) # noqa: E501 + return data + + def get_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_legacy_authorizations_id_with_http_info(auth_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to get. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_legacy_authorizations_id_prepare(auth_id, **kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_legacy_authorizations_id_async(self, auth_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a legacy authorization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to get. (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_legacy_authorizations_id_prepare(auth_id, **kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_legacy_authorizations_id_prepare(self, auth_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_legacy_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `get_legacy_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_legacy_authorizations_id(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a legacy authorization to be active or inactive. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_legacy_authorizations_id(auth_id, authorization_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: Legacy authorization to update (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_legacy_authorizations_id_with_http_info(auth_id, authorization_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_legacy_authorizations_id_with_http_info(auth_id, authorization_update_request, **kwargs) # noqa: E501 + return data + + def patch_legacy_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a legacy authorization to be active or inactive. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_legacy_authorizations_id_with_http_info(auth_id, authorization_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: Legacy authorization to update (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_legacy_authorizations_id_prepare(auth_id, authorization_update_request, **kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_legacy_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a legacy authorization to be active or inactive. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param AuthorizationUpdateRequest authorization_update_request: Legacy authorization to update (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_legacy_authorizations_id_prepare(auth_id, authorization_update_request, **kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations/{authID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_legacy_authorizations_id_prepare(self, auth_id, authorization_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'authorization_update_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_legacy_authorizations_id', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `patch_legacy_authorizations_id`") # noqa: E501 + # verify the required parameter 'authorization_update_request' is set + if ('authorization_update_request' not in local_var_params or + local_var_params['authorization_update_request'] is None): + raise ValueError("Missing the required parameter `authorization_update_request` when calling `patch_legacy_authorizations_id`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'authorization_update_request' in local_var_params: + body_params = local_var_params['authorization_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_legacy_authorizations(self, legacy_authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_legacy_authorizations(legacy_authorization_post_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param LegacyAuthorizationPostRequest legacy_authorization_post_request: Legacy authorization to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_legacy_authorizations_with_http_info(legacy_authorization_post_request, **kwargs) # noqa: E501 + else: + (data) = self.post_legacy_authorizations_with_http_info(legacy_authorization_post_request, **kwargs) # noqa: E501 + return data + + def post_legacy_authorizations_with_http_info(self, legacy_authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create a legacy authorization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_legacy_authorizations_with_http_info(legacy_authorization_post_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param LegacyAuthorizationPostRequest legacy_authorization_post_request: Legacy authorization to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_legacy_authorizations_prepare(legacy_authorization_post_request, **kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_legacy_authorizations_async(self, legacy_authorization_post_request, **kwargs): # noqa: E501,D401,D403 + """Create a legacy authorization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param LegacyAuthorizationPostRequest legacy_authorization_post_request: Legacy authorization to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Authorization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_legacy_authorizations_prepare(legacy_authorization_post_request, **kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Authorization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_legacy_authorizations_prepare(self, legacy_authorization_post_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['legacy_authorization_post_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_legacy_authorizations', all_params, local_var_params) + # verify the required parameter 'legacy_authorization_post_request' is set + if ('legacy_authorization_post_request' not in local_var_params or + local_var_params['legacy_authorization_post_request'] is None): + raise ValueError("Missing the required parameter `legacy_authorization_post_request` when calling `post_legacy_authorizations`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'legacy_authorization_post_request' in local_var_params: + body_params = local_var_params['legacy_authorization_post_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_legacy_authorizations_id_password(self, auth_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Set a legacy authorization password. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_legacy_authorizations_id_password(auth_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param PasswordResetBody password_reset_body: New password (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_legacy_authorizations_id_password_with_http_info(auth_id, password_reset_body, **kwargs) # noqa: E501 + else: + (data) = self.post_legacy_authorizations_id_password_with_http_info(auth_id, password_reset_body, **kwargs) # noqa: E501 + return data + + def post_legacy_authorizations_id_password_with_http_info(self, auth_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Set a legacy authorization password. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_legacy_authorizations_id_password_with_http_info(auth_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param PasswordResetBody password_reset_body: New password (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_legacy_authorizations_id_password_prepare(auth_id, password_reset_body, **kwargs) + + return self.api_client.call_api( + '/private/legacy/authorizations/{authID}/password', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_legacy_authorizations_id_password_async(self, auth_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Set a legacy authorization password. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str auth_id: The ID of the legacy authorization to update. (required) + :param PasswordResetBody password_reset_body: New password (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_legacy_authorizations_id_password_prepare(auth_id, password_reset_body, **kwargs) + + return await self.api_client.call_api( + '/private/legacy/authorizations/{authID}/password', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_legacy_authorizations_id_password_prepare(self, auth_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['auth_id', 'password_reset_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_legacy_authorizations_id_password', all_params, local_var_params) + # verify the required parameter 'auth_id' is set + if ('auth_id' not in local_var_params or + local_var_params['auth_id'] is None): + raise ValueError("Missing the required parameter `auth_id` when calling `post_legacy_authorizations_id_password`") # noqa: E501 + # verify the required parameter 'password_reset_body' is set + if ('password_reset_body' not in local_var_params or + local_var_params['password_reset_body'] is None): + raise ValueError("Missing the required parameter `password_reset_body` when calling `post_legacy_authorizations_id_password`") # noqa: E501 + + path_params = {} + if 'auth_id' in local_var_params: + path_params['authID'] = local_var_params['auth_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'password_reset_body' in local_var_params: + body_params = local_var_params['password_reset_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/metrics_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/metrics_service.py new file mode 100644 index 0000000..9e99de0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/metrics_service.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class MetricsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """MetricsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_metrics(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve workload performance metrics. + + Returns metrics about the workload performance of an InfluxDB instance. Use this endpoint to get performance, resource, and usage metrics. #### Related guides - For the list of metrics categories, see [InfluxDB OSS metrics](https://docs.influxdata.com/influxdb/latest/reference/internals/metrics/). - Learn how to use InfluxDB to [scrape Prometheus metrics](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/scrape-prometheus-metrics/). - Learn how InfluxDB [parses the Prometheus exposition format](https://docs.influxdata.com/influxdb/latest/reference/prometheus-metrics/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_metrics(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_metrics_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_metrics_with_http_info(**kwargs) # noqa: E501 + return data + + def get_metrics_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve workload performance metrics. + + Returns metrics about the workload performance of an InfluxDB instance. Use this endpoint to get performance, resource, and usage metrics. #### Related guides - For the list of metrics categories, see [InfluxDB OSS metrics](https://docs.influxdata.com/influxdb/latest/reference/internals/metrics/). - Learn how to use InfluxDB to [scrape Prometheus metrics](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/scrape-prometheus-metrics/). - Learn how InfluxDB [parses the Prometheus exposition format](https://docs.influxdata.com/influxdb/latest/reference/prometheus-metrics/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_metrics_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_metrics_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/metrics', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_metrics_async(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve workload performance metrics. + + Returns metrics about the workload performance of an InfluxDB instance. Use this endpoint to get performance, resource, and usage metrics. 
#### Related guides - For the list of metrics categories, see [InfluxDB OSS metrics](https://docs.influxdata.com/influxdb/latest/reference/internals/metrics/). - Learn how to use InfluxDB to [scrape Prometheus metrics](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/scrape-prometheus-metrics/). - Learn how InfluxDB [parses the Prometheus exposition format](https://docs.influxdata.com/influxdb/latest/reference/prometheus-metrics/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_metrics_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/metrics', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_metrics_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_metrics', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['text/plain', 'application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_endpoints_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_endpoints_service.py new file mode 100644 index 0000000..151b486 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_endpoints_service.py @@ -0,0 +1,1137 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class NotificationEndpointsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """NotificationEndpointsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def create_notification_endpoint(self, post_notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Add a notification endpoint. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_notification_endpoint(post_notification_endpoint, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostNotificationEndpoint post_notification_endpoint: Notification endpoint to create (required) + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.create_notification_endpoint_with_http_info(post_notification_endpoint, **kwargs) # noqa: E501 + else: + (data) = self.create_notification_endpoint_with_http_info(post_notification_endpoint, **kwargs) # noqa: E501 + return data + + def create_notification_endpoint_with_http_info(self, post_notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Add a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_notification_endpoint_with_http_info(post_notification_endpoint, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostNotificationEndpoint post_notification_endpoint: Notification endpoint to create (required) + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_notification_endpoint_prepare(post_notification_endpoint, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def create_notification_endpoint_async(self, post_notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Add a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostNotificationEndpoint post_notification_endpoint: Notification endpoint to create (required) + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_notification_endpoint_prepare(post_notification_endpoint, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _create_notification_endpoint_prepare(self, post_notification_endpoint, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_notification_endpoint'] # noqa: E501 + self._check_operation_params('create_notification_endpoint', all_params, local_var_params) + # verify the required parameter 'post_notification_endpoint' is set + if ('post_notification_endpoint' not in local_var_params or + local_var_params['post_notification_endpoint'] is None): + raise ValueError("Missing the required parameter `post_notification_endpoint` when calling `create_notification_endpoint`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'post_notification_endpoint' in local_var_params: + body_params = local_var_params['post_notification_endpoint'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_notification_endpoints_id(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_endpoints_id(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_notification_endpoints_id_with_http_info(endpoint_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_notification_endpoints_id_with_http_info(endpoint_id, **kwargs) # noqa: E501 + return data + + def delete_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_endpoints_id_with_http_info(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_endpoints_id_prepare(endpoint_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_notification_endpoints_id_async(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_endpoints_id_prepare(endpoint_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_notification_endpoints_id_prepare(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_notification_endpoints_id', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `delete_notification_endpoints_id`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_notification_endpoints_id_labels_id(self, endpoint_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a notification endpoint. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_endpoints_id_labels_id(endpoint_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_notification_endpoints_id_labels_id_with_http_info(endpoint_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_notification_endpoints_id_labels_id_with_http_info(endpoint_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_notification_endpoints_id_labels_id_with_http_info(self, endpoint_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_endpoints_id_labels_id_with_http_info(endpoint_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_endpoints_id_labels_id_prepare(endpoint_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_notification_endpoints_id_labels_id_async(self, endpoint_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
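+
+ Example (sketch; assumes ``ApiException`` from ``influxdb_client.rest``
+ is the error raised for non-2xx responses, which is how the generated
+ client reports HTTP failures):
+ >>> from influxdb_client.rest import ApiException
+ >>> try:
+ ...     await api.delete_notification_endpoints_id_labels_id_async(endpoint_id, label_id)
+ ... except ApiException as e:
+ ...     print(e.status, e.reason)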
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_endpoints_id_labels_id_prepare(endpoint_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_notification_endpoints_id_labels_id_prepare(self, endpoint_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_notification_endpoints_id_labels_id', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `delete_notification_endpoints_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_notification_endpoints_id_labels_id`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_endpoints(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification endpoints. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show notification endpoints that belong to specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: NotificationEndpoints + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_endpoints_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_endpoints_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_notification_endpoints_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification endpoints. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show notification endpoints that belong to specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: NotificationEndpoints + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoints', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_endpoints_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification endpoints. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: Only show notification endpoints that belong to specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :return: NotificationEndpoints + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoints', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_endpoints_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span', 'offset', 'limit'] # noqa: E501 + self._check_operation_params('get_notification_endpoints', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_notification_endpoints`") # noqa: E501 + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_notification_endpoints`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_notification_endpoints`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_notification_endpoints`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_endpoints_id(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints_id(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_endpoints_id_with_http_info(endpoint_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_endpoints_id_with_http_info(endpoint_id, **kwargs) # noqa: E501 + return data + + def get_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints_id_with_http_info(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_id_prepare(endpoint_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_endpoints_id_async(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_id_prepare(endpoint_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_endpoints_id_prepare(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_notification_endpoints_id', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `get_notification_endpoints_id`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_endpoints_id_labels(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints_id_labels(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_endpoints_id_labels_with_http_info(endpoint_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_endpoints_id_labels_with_http_info(endpoint_id, **kwargs) # noqa: E501 + return data + + def get_notification_endpoints_id_labels_with_http_info(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_endpoints_id_labels_with_http_info(endpoint_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_id_labels_prepare(endpoint_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_endpoints_id_labels_async(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_endpoints_id_labels_prepare(endpoint_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_endpoints_id_labels_prepare(self, endpoint_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_notification_endpoints_id_labels', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `get_notification_endpoints_id_labels`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_notification_endpoints_id(self, endpoint_id, notification_endpoint_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_notification_endpoints_id(endpoint_id, notification_endpoint_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpointUpdate notification_endpoint_update: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint_update, **kwargs) # noqa: E501 + return data + + def patch_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpointUpdate notification_endpoint_update: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_notification_endpoints_id_prepare(endpoint_id, notification_endpoint_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_notification_endpoints_id_async(self, endpoint_id, notification_endpoint_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpointUpdate notification_endpoint_update: Check update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
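+
+ Example (sketch; assumes ``NotificationEndpointUpdate`` exposes the
+ mutable fields ``name``, ``description`` and ``status``):
+ >>> from influxdb_client import NotificationEndpointUpdate
+ >>> updated = await api.patch_notification_endpoints_id_async(
+ ...     endpoint_id, NotificationEndpointUpdate(name="alerts-prod", status="inactive"))
+ >>> print(updated.status)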
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_notification_endpoints_id_prepare(endpoint_id, notification_endpoint_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_notification_endpoints_id_prepare(self, endpoint_id, notification_endpoint_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'notification_endpoint_update', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_notification_endpoints_id', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `patch_notification_endpoints_id`") # noqa: E501 + # verify the required parameter 'notification_endpoint_update' is set + if ('notification_endpoint_update' not in local_var_params or + local_var_params['notification_endpoint_update'] is None): + raise ValueError("Missing the required parameter `notification_endpoint_update` when calling `patch_notification_endpoints_id`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'notification_endpoint_update' in local_var_params: + body_params = local_var_params['notification_endpoint_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_notification_endpoint_id_labels(self, endpoint_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_notification_endpoint_id_labels(endpoint_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_notification_endpoint_id_labels_with_http_info(endpoint_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_notification_endpoint_id_labels_with_http_info(endpoint_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_notification_endpoint_id_labels_with_http_info(self, endpoint_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_notification_endpoint_id_labels_with_http_info(endpoint_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_notification_endpoint_id_labels_prepare(endpoint_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_notification_endpoint_id_labels_async(self, endpoint_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_notification_endpoint_id_labels_prepare(endpoint_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_notification_endpoint_id_labels_prepare(self, endpoint_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_notification_endpoint_id_labels', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `post_notification_endpoint_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_notification_endpoint_id_labels`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_notification_endpoints_id(self, endpoint_id, notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_notification_endpoints_id(endpoint_id, notification_endpoint, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpoint notification_endpoint: A new notification endpoint to replace the existing endpoint with (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint, **kwargs) # noqa: E501 + else: + (data) = self.put_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint, **kwargs) # noqa: E501 + return data + + def put_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_notification_endpoints_id_with_http_info(endpoint_id, notification_endpoint, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpoint notification_endpoint: A new notification endpoint to replace the existing endpoint with (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_notification_endpoints_id_prepare(endpoint_id, notification_endpoint, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_notification_endpoints_id_async(self, endpoint_id, notification_endpoint, **kwargs): # noqa: E501,D401,D403 + """Update a notification endpoint. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str endpoint_id: The notification endpoint ID. (required) + :param NotificationEndpoint notification_endpoint: A new notification endpoint to replace the existing endpoint with (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationEndpoint + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_notification_endpoints_id_prepare(endpoint_id, notification_endpoint, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationEndpoints/{endpointID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationEndpoint', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_notification_endpoints_id_prepare(self, endpoint_id, notification_endpoint, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['endpoint_id', 'notification_endpoint', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_notification_endpoints_id', all_params, local_var_params) + # verify the required parameter 'endpoint_id' is set + if ('endpoint_id' not in local_var_params or + local_var_params['endpoint_id'] is None): + raise ValueError("Missing the required parameter `endpoint_id` when calling `put_notification_endpoints_id`") # noqa: E501 + # verify the required parameter 'notification_endpoint' is set + if ('notification_endpoint' not in local_var_params or + local_var_params['notification_endpoint'] is None): + raise ValueError("Missing the required parameter `notification_endpoint` when calling `put_notification_endpoints_id`") # noqa: E501 + + path_params = {} + if 'endpoint_id' in local_var_params: + path_params['endpointID'] = local_var_params['endpoint_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'notification_endpoint' in local_var_params: + body_params = local_var_params['notification_endpoint'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_rules_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_rules_service.py new file mode 100644 index 0000000..208feeb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/notification_rules_service.py @@ -0,0 +1,1149 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class NotificationRulesService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """NotificationRulesService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def create_notification_rule(self, post_notification_rule, **kwargs): # noqa: E501,D401,D403 + """Add a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_notification_rule(post_notification_rule, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostNotificationRule post_notification_rule: Notification rule to create (required) + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.create_notification_rule_with_http_info(post_notification_rule, **kwargs) # noqa: E501 + else: + (data) = self.create_notification_rule_with_http_info(post_notification_rule, **kwargs) # noqa: E501 + return data + + def create_notification_rule_with_http_info(self, post_notification_rule, **kwargs): # noqa: E501,D401,D403 + """Add a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_notification_rule_with_http_info(post_notification_rule, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostNotificationRule post_notification_rule: Notification rule to create (required) + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_notification_rule_prepare(post_notification_rule, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def create_notification_rule_async(self, post_notification_rule, **kwargs): # noqa: E501,D401,D403 + """Add a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostNotificationRule post_notification_rule: Notification rule to create (required) + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_notification_rule_prepare(post_notification_rule, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _create_notification_rule_prepare(self, post_notification_rule, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_notification_rule'] # noqa: E501 + self._check_operation_params('create_notification_rule', all_params, local_var_params) + # verify the required parameter 'post_notification_rule' is set + if ('post_notification_rule' not in local_var_params or + local_var_params['post_notification_rule'] is None): + raise ValueError("Missing the required parameter `post_notification_rule` when calling `create_notification_rule`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'post_notification_rule' in local_var_params: + body_params = local_var_params['post_notification_rule'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_notification_rules_id(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_rules_id(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_notification_rules_id_with_http_info(rule_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_notification_rules_id_with_http_info(rule_id, **kwargs) # noqa: E501 + return data + + def delete_notification_rules_id_with_http_info(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_rules_id_with_http_info(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_rules_id_prepare(rule_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_notification_rules_id_async(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Delete a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_rules_id_prepare(rule_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_notification_rules_id_prepare(self, rule_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_notification_rules_id', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `delete_notification_rules_id`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_notification_rules_id_labels_id(self, rule_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_rules_id_labels_id(rule_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str label_id: The ID of the label to delete. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_notification_rules_id_labels_id_with_http_info(rule_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_notification_rules_id_labels_id_with_http_info(rule_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_notification_rules_id_labels_id_with_http_info(self, rule_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_notification_rules_id_labels_id_with_http_info(rule_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_rules_id_labels_id_prepare(rule_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_notification_rules_id_labels_id_async(self, rule_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete label from a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_notification_rules_id_labels_id_prepare(rule_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_notification_rules_id_labels_id_prepare(self, rule_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_notification_rules_id_labels_id', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `delete_notification_rules_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_notification_rules_id_labels_id`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_rules(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification rules. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show notification rules that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str check_id: Only show notifications that belong to the specific check ID. + :param str tag: Only return notification rules that "would match" statuses which contain the tag key value pairs provided. + :return: NotificationRules + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_rules_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_rules_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_notification_rules_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification rules. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: Only show notification rules that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str check_id: Only show notifications that belong to the specific check ID. + :param str tag: Only return notification rules that "would match" statuses which contain the tag key value pairs provided. + :return: NotificationRules + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRules', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_rules_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all notification rules. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: Only show notification rules that belong to a specific organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str check_id: Only show notifications that belong to the specific check ID. + :param str tag: Only return notification rules that "would match" statuses which contain the tag key value pairs provided. + :return: NotificationRules + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRules', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_rules_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span', 'offset', 'limit', 'check_id', 'tag'] # noqa: E501 + self._check_operation_params('get_notification_rules', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_notification_rules`") # noqa: E501 + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_notification_rules`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_notification_rules`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_notification_rules`, must be a value greater than or equal to `1`") # noqa: E501 + if 'tag' in local_var_params and not re.search(r'^[a-zA-Z0-9_]+:[a-zA-Z0-9_]+$', local_var_params['tag']): # noqa: E501 + raise ValueError("Invalid value for parameter `tag` when calling `get_notification_rules`, must conform to the pattern `/^[a-zA-Z0-9_]+:[a-zA-Z0-9_]+$/`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'check_id' in local_var_params: + query_params.append(('checkID', local_var_params['check_id'])) # noqa: E501 + if 'tag' in local_var_params: + query_params.append(('tag', local_var_params['tag'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_rules_id(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_rules_id_with_http_info(rule_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_rules_id_with_http_info(rule_id, **kwargs) # noqa: E501 + return data + + def get_notification_rules_id_with_http_info(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id_with_http_info(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_prepare(rule_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_rules_id_async(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. 
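As the doctest above shows, passing `async_req=True` returns a worker-thread handle rather than the decoded body; `thread.get()` blocks until the result is available. A sketch reusing the `service` handle from the first example (placeholder rule ID):

    thread = service.get_notification_rules_id("0000000000000001", async_req=True)
    # ... do other work while the request runs in the background pool ...
    rule = thread.get()  # blocks until the HTTP call completes
    print(rule.name)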
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_prepare(rule_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_rules_id_prepare(self, rule_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_notification_rules_id', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `get_notification_rules_id`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_notification_rules_id_labels(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id_labels(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_rules_id_labels_with_http_info(rule_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_rules_id_labels_with_http_info(rule_id, **kwargs) # noqa: E501 + return data + + def get_notification_rules_id_labels_with_http_info(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id_labels_with_http_info(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_labels_prepare(rule_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_rules_id_labels_async(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_labels_prepare(rule_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_rules_id_labels_prepare(self, rule_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_notification_rules_id_labels', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `get_notification_rules_id_labels`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_notification_rules_id(self, rule_id, notification_rule_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_notification_rules_id(rule_id, notification_rule_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. 
(required) + :param NotificationRuleUpdate notification_rule_update: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_notification_rules_id_with_http_info(rule_id, notification_rule_update, **kwargs) # noqa: E501 + else: + (data) = self.patch_notification_rules_id_with_http_info(rule_id, notification_rule_update, **kwargs) # noqa: E501 + return data + + def patch_notification_rules_id_with_http_info(self, rule_id, notification_rule_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_notification_rules_id_with_http_info(rule_id, notification_rule_update, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param NotificationRuleUpdate notification_rule_update: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_notification_rules_id_prepare(rule_id, notification_rule_update, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_notification_rules_id_async(self, rule_id, notification_rule_update, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param NotificationRuleUpdate notification_rule_update: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. 
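PATCH takes a partial `NotificationRuleUpdate` body. A sketch, assuming the model accepts `name`/`description`/`status` keyword arguments, where `status` toggles between `active` and `inactive` (placeholder rule ID, same `service` handle):

    from influxdb_client import NotificationRuleUpdate

    update = NotificationRuleUpdate(description="Pages on-call via Slack",
                                    status="inactive")
    rule = service.patch_notification_rules_id("0000000000000001", update)
    print(rule.status)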
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_notification_rules_id_prepare(rule_id, notification_rule_update, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_notification_rules_id_prepare(self, rule_id, notification_rule_update, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'notification_rule_update', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_notification_rules_id', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `patch_notification_rules_id`") # noqa: E501 + # verify the required parameter 'notification_rule_update' is set + if ('notification_rule_update' not in local_var_params or + local_var_params['notification_rule_update'] is None): + raise ValueError("Missing the required parameter `notification_rule_update` when calling `patch_notification_rules_id`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'notification_rule_update' in local_var_params: + body_params = local_var_params['notification_rule_update'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_notification_rule_id_labels(self, rule_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_notification_rule_id_labels(rule_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_notification_rule_id_labels_with_http_info(rule_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_notification_rule_id_labels_with_http_info(rule_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_notification_rule_id_labels_with_http_info(self, rule_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_notification_rule_id_labels_with_http_info(rule_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_notification_rule_id_labels_prepare(rule_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_notification_rule_id_labels_async(self, rule_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_notification_rule_id_labels_prepare(rule_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_notification_rule_id_labels_prepare(self, rule_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_notification_rule_id_labels', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `post_notification_rule_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_notification_rule_id_labels`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_notification_rules_id(self, rule_id, notification_rule, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_notification_rules_id(rule_id, notification_rule, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param NotificationRule notification_rule: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_notification_rules_id_with_http_info(rule_id, notification_rule, **kwargs) # noqa: E501 + else: + (data) = self.put_notification_rules_id_with_http_info(rule_id, notification_rule, **kwargs) # noqa: E501 + return data + + def put_notification_rules_id_with_http_info(self, rule_id, notification_rule, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_notification_rules_id_with_http_info(rule_id, notification_rule, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param NotificationRule notification_rule: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_notification_rules_id_prepare(rule_id, notification_rule, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_notification_rules_id_async(self, rule_id, notification_rule, **kwargs): # noqa: E501,D401,D403 + """Update a notification rule. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param NotificationRule notification_rule: Notification rule update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: NotificationRule + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_notification_rules_id_prepare(rule_id, notification_rule, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='NotificationRule', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_notification_rules_id_prepare(self, rule_id, notification_rule, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'notification_rule', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_notification_rules_id', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `put_notification_rules_id`") # noqa: E501 + # verify the required parameter 'notification_rule' is set + if ('notification_rule' not in local_var_params or + local_var_params['notification_rule'] is None): + raise ValueError("Missing the required parameter `notification_rule` when calling `put_notification_rules_id`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'notification_rule' in local_var_params: + body_params = local_var_params['notification_rule'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/organizations_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/organizations_service.py new file mode 100644 index 0000000..6d845a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/organizations_service.py @@ -0,0 +1,1427 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class OrganizationsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """OrganizationsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_orgs_id(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete an organization. + + Deletes an organization. Deleting an organization from InfluxDB Cloud can't be undone. Once deleted, all data associated with the organization is removed. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one organization can be deleted per request. #### Related guides - [Delete organizations](https://docs.influxdata.com/influxdb/latest/organizations/delete-orgs/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_orgs_id_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_orgs_id_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def delete_orgs_id_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete an organization. + + Deletes an organization. Deleting an organization from InfluxDB Cloud can't be undone. Once deleted, all data associated with the organization is removed. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one organization can be deleted per request. #### Related guides - [Delete organizations](https://docs.influxdata.com/influxdb/latest/organizations/delete-orgs/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_orgs_id_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete an organization. + + Deletes an organization. Deleting an organization from InfluxDB Cloud can't be undone. Once deleted, all data associated with the organization is removed. #### InfluxDB Cloud - Does the following when you send a delete request: 1. Validates the request and queues the delete. 2. Returns an HTTP `204` status code if queued; _error_ otherwise. 3. Handles the delete asynchronously. #### InfluxDB OSS - Validates the request, handles the delete synchronously, and then responds with success or failure. #### Limitations - Only one organization can be deleted per request. #### Related guides - [Delete organizations](https://docs.influxdata.com/influxdb/latest/organizations/delete-orgs/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_orgs_id_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_orgs_id', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `delete_orgs_id`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_orgs_id_members_id(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from an organization. + + Removes a member from an organization. Use this endpoint to remove a user's member privileges for an organization. Removing member privileges removes the user's `read` and `write` permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related guides - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_members_id(user_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_orgs_id_members_id_with_http_info(user_id, org_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_orgs_id_members_id_with_http_info(user_id, org_id, **kwargs) # noqa: E501 + return data + + def delete_orgs_id_members_id_with_http_info(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from an organization. + + Removes a member from an organization. Use this endpoint to remove a user's member privileges for an organization. Removing member privileges removes the user's `read` and `write` permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related guides - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_members_id_with_http_info(user_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_members_id_prepare(user_id, org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_orgs_id_members_id_async(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from an organization. + + Removes a member from an organization. Use this endpoint to remove a user's member privileges for an organization. Removing member privileges removes the user's `read` and `write` permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related guides - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. 
+ + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove a user from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_members_id_prepare(user_id, org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_orgs_id_members_id_prepare(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_orgs_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_orgs_id_members_id`") # noqa: E501 + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `delete_orgs_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_orgs_id_owners_id(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from an organization. + + Removes an [owner](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) from the organization. Organization owners have permission to delete organizations and remove user and member permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_owners_id(user_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_orgs_id_owners_id_with_http_info(user_id, org_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_orgs_id_owners_id_with_http_info(user_id, org_id, **kwargs) # noqa: E501 + return data + + def delete_orgs_id_owners_id_with_http_info(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from an organization. + + Removes an [owner](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) from the organization. Organization owners have permission to delete organizations and remove user and member permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_owners_id_with_http_info(user_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_owners_id_prepare(user_id, org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_orgs_id_owners_id_async(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from an organization. + + Removes an [owner](https://docs.influxdata.com/influxdb/latest/reference/glossary/#owner) from the organization. Organization owners have permission to delete organizations and remove user and member permissions from the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. 
#### Limitations - Owner permissions are separate from API token permissions. - Owner permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to remove an owner from. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the user to remove. (required) + :param str org_id: The ID of the organization to remove an owner from. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_owners_id_prepare(user_id, org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_orgs_id_owners_id_prepare(self, user_id, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_orgs_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_orgs_id_owners_id`") # noqa: E501 + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `delete_orgs_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_orgs(self, **kwargs): # noqa: E501,D401,D403 + """List organizations. + + Lists [organizations](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization/). To limit which organizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all organizations up to the default `limit`. #### InfluxDB Cloud - Only returns the organization that owns the token passed in the request. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str org: An organization name. Only returns the specified organization. + :param str org_id: An organization ID. Only returns the specified organization. + :param str user_id: A user ID. Only returns organizations where the specified user is a member or owner. + :return: Organizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_orgs_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_orgs_with_http_info(**kwargs) # noqa: E501 + return data + + def get_orgs_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List organizations. + + Lists [organizations](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization/). To limit which organizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all organizations up to the default `limit`. #### InfluxDB Cloud - Only returns the organization that owns the token passed in the request. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str org: An organization name. Only returns the specified organization. + :param str org_id: An organization ID. Only returns the specified organization. + :param str user_id: A user ID. Only returns organizations where the specified user is a member or owner. + :return: Organizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_orgs_async(self, **kwargs): # noqa: E501,D401,D403 + """List organizations. 
+ + Lists [organizations](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization/). To limit which organizations are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all organizations up to the default `limit`. #### InfluxDB Cloud - Only returns the organization that owns the token passed in the request. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param bool descending: + :param str org: An organization name. Only returns the specified organization. + :param str org_id: An organization ID. Only returns the specified organization. + :param str user_id: A user ID. Only returns organizations where the specified user is a member or owner. + :return: Organizations + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organizations', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_orgs_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'offset', 'limit', 'descending', 'org', 'org_id', 'user_id'] # noqa: E501 + self._check_operation_params('get_orgs', all_params, local_var_params) + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_orgs`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_orgs`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_orgs`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'descending' in local_var_params: + query_params.append(('descending', local_var_params['descending'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + 
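+ # NOTE: the snake_case keyword arguments accepted by this client are
+ # mapped here to the camelCase query-parameter names that the HTTP API
+ # expects; for example, `org_id` is sent as `orgID` above and
+ # `user_id` is sent as `userID` below.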
if 'user_id' in local_var_params: + query_params.append(('userID', local_var_params['user_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_orgs_id(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an organization. + + Retrieves an organization. Use this endpoint to retrieve information for a specific organization. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_orgs_id_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_orgs_id_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_orgs_id_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an organization. + + Retrieves an organization. Use this endpoint to retrieve information for a specific organization. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_orgs_id_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve an organization. + + Retrieves an organization. Use this endpoint to retrieve information for a specific organization. #### Related guides - [View organizations](https://docs.influxdata.com/influxdb/latest/organizations/view-orgs/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization to retrieve. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_orgs_id_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_orgs_id', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_orgs_id`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_orgs_id_members(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all members of an organization. + + Lists all users that belong to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve members for. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_members(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
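+
+ A minimal synchronous sketch (illustrative only; assumes `api` is an
+ OrganizationsService wired to a configured api_client, the ID below is a
+ placeholder, and the `users` attribute is assumed from the
+ ResourceMembers model):
+
+ >>> members = api.get_orgs_id_members("0000000000000001")
+ >>> names = [u.name for u in members.users]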
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_orgs_id_members_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_orgs_id_members_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_orgs_id_members_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all members of an organization. + + Lists all users that belong to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve members for. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_members_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_members_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_orgs_id_members_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all members of an organization. + + Lists all users that belong to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve members for. 
#### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization to retrieve users for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_members_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_orgs_id_members_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_orgs_id_members', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_orgs_id_members`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_orgs_id_owners(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of an organization. + + Lists all owners of an organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners from. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_owners(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to list owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_orgs_id_owners_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_orgs_id_owners_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_orgs_id_owners_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of an organization. + + Lists all owners of an organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners from. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_owners_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to list owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_owners_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_orgs_id_owners_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of an organization. + + Lists all owners of an organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `read-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to retrieve a list of owners from. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization to list owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_owners_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_orgs_id_owners_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_orgs_id_owners', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_orgs_id_owners`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_orgs_id(self, org_id, patch_organization_request, **kwargs): # noqa: E501,D401,D403 + """Update an organization. + + Updates an organization. Use this endpoint to update properties (`name`, `description`) of an organization. Updating an organization’s name affects all resources that reference the organization by name, including the following: - Queries - Dashboards - Tasks - Telegraf configurations - Templates If you change an organization name, be sure to update the organization name in these resources as well. #### Related Guides - [Update an organization](https://docs.influxdata.com/influxdb/latest/organizations/update-org/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_orgs_id(org_id, patch_organization_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to update. (required) + :param PatchOrganizationRequest patch_organization_request: The organization update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_orgs_id_with_http_info(org_id, patch_organization_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_orgs_id_with_http_info(org_id, patch_organization_request, **kwargs) # noqa: E501 + return data + + def patch_orgs_id_with_http_info(self, org_id, patch_organization_request, **kwargs): # noqa: E501,D401,D403 + """Update an organization. + + Updates an organization. 
Use this endpoint to update properties (`name`, `description`) of an organization. Updating an organization’s name affects all resources that reference the organization by name, including the following: - Queries - Dashboards - Tasks - Telegraf configurations - Templates If you change an organization name, be sure to update the organization name in these resources as well. #### Related Guides - [Update an organization](https://docs.influxdata.com/influxdb/latest/organizations/update-org/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_orgs_id_with_http_info(org_id, patch_organization_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization to update. (required) + :param PatchOrganizationRequest patch_organization_request: The organization update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_orgs_id_prepare(org_id, patch_organization_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_orgs_id_async(self, org_id, patch_organization_request, **kwargs): # noqa: E501,D401,D403 + """Update an organization. + + Updates an organization. Use this endpoint to update properties (`name`, `description`) of an organization. Updating an organization’s name affects all resources that reference the organization by name, including the following: - Queries - Dashboards - Tasks - Telegraf configurations - Templates If you change an organization name, be sure to update the organization name in these resources as well. #### Related Guides - [Update an organization](https://docs.influxdata.com/influxdb/latest/organizations/update-org/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization to update. (required) + :param PatchOrganizationRequest patch_organization_request: The organization update to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. 
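+
+ A minimal sketch of building and sending the update (illustrative only;
+ the top-level import and the `name` field are assumed from the client's
+ PatchOrganizationRequest model):
+
+ >>> import asyncio
+ >>> from influxdb_client import PatchOrganizationRequest
+ >>> update = PatchOrganizationRequest(name="renamed-org")
+ >>> org = asyncio.run(api.patch_orgs_id_async(org_id, update))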
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_orgs_id_prepare(org_id, patch_organization_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_orgs_id_prepare(self, org_id, patch_organization_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'patch_organization_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_orgs_id', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `patch_orgs_id`") # noqa: E501 + # verify the required parameter 'patch_organization_request' is set + if ('patch_organization_request' not in local_var_params or + local_var_params['patch_organization_request'] is None): + raise ValueError("Missing the required parameter `patch_organization_request` when calling `patch_orgs_id`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'patch_organization_request' in local_var_params: + body_params = local_var_params['patch_organization_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_orgs(self, post_organization_request, **kwargs): # noqa: E501,D401,D403 + """Create an organization. + + Creates an [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) and returns the newly created organization. #### InfluxDB Cloud - Doesn't allow you to use this endpoint to create organizations. #### Related guides - [Manage organizations](https://docs.influxdata.com/influxdb/latest/organizations) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs(post_organization_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostOrganizationRequest post_organization_request: The organization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_orgs_with_http_info(post_organization_request, **kwargs) # noqa: E501 + else: + (data) = self.post_orgs_with_http_info(post_organization_request, **kwargs) # noqa: E501 + return data + + def post_orgs_with_http_info(self, post_organization_request, **kwargs): # noqa: E501,D401,D403 + """Create an organization. + + Creates an [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) and returns the newly created organization. #### InfluxDB Cloud - Doesn't allow you to use this endpoint to create organizations. #### Related guides - [Manage organizations](https://docs.influxdata.com/influxdb/latest/organizations) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_with_http_info(post_organization_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostOrganizationRequest post_organization_request: The organization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_prepare(post_organization_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_orgs_async(self, post_organization_request, **kwargs): # noqa: E501,D401,D403 + """Create an organization. + + Creates an [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) and returns the newly created organization. #### InfluxDB Cloud - Doesn't allow you to use this endpoint to create organizations. #### Related guides - [Manage organizations](https://docs.influxdata.com/influxdb/latest/organizations) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostOrganizationRequest post_organization_request: The organization to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Organization + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_prepare(post_organization_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Organization', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_orgs_prepare(self, post_organization_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_organization_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_orgs', all_params, local_var_params) + # verify the required parameter 'post_organization_request' is set + if ('post_organization_request' not in local_var_params or + local_var_params['post_organization_request'] is None): + raise ValueError("Missing the required parameter `post_organization_request` when calling `post_orgs`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'post_organization_request' in local_var_params: + body_params = local_var_params['post_organization_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_orgs_id_members(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to an organization. + + Add a user to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add a member to. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_members(org_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add to the organization. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_orgs_id_members_with_http_info(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_orgs_id_members_with_http_info(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_orgs_id_members_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to an organization. + + Add a user to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add a member to. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_members_with_http_info(org_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add to the organization. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_members_prepare(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_orgs_id_members_async(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to an organization. + + Add a user to an organization. InfluxDB [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) have permission to access InfluxDB. [Members](https://docs.influxdata.com/influxdb/latest/reference/glossary/#member) are users within the organization. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. 
#### Limitations - Member permissions are separate from API token permissions. - Member permissions are used in the context of the InfluxDB UI. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add a member to. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/users/) - [Manage members](https://docs.influxdata.com/influxdb/latest/organizations/members/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add to the organization. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_members_prepare(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_orgs_id_members_prepare(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_orgs_id_members', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `post_orgs_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_orgs_id_members`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_orgs_id_owners(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to an organization. 
+ + Adds an owner to an organization. Use this endpoint to assign the organization `owner` role to a user. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_owners(org_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization that you want to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add as an owner of the organization. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_orgs_id_owners_with_http_info(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_orgs_id_owners_with_http_info(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_orgs_id_owners_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to an organization. + + Adds an owner to an organization. Use this endpoint to assign the organization `owner` role to a user. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_owners_with_http_info(org_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The ID of the organization that you want to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add as an owner of the organization. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_owners_prepare(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_orgs_id_owners_async(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to an organization. + + Adds an owner to an organization. Use this endpoint to assign the organization `owner` role to a user. #### InfluxDB Cloud - Doesn't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. #### Required permissions - `write-orgs INFLUX_ORG_ID` *`INFLUX_ORG_ID`* is the ID of the organization that you want to add an owner for. #### Related endpoints - [Authorizations](#tag/Authorizations-(API-tokens)) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The ID of the organization that you want to add an owner for. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: The user to add as an owner of the organization. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_owners_prepare(org_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_orgs_id_owners_prepare(self, org_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_orgs_id_owners', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `post_orgs_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_orgs_id_owners`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/ping_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/ping_service.py new file mode 100644 index 0000000..2d89a74 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/ping_service.py @@ -0,0 +1,232 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class PingService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """PingService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_ping(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Retrieves the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_ping(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_ping_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_ping_with_http_info(**kwargs) # noqa: E501 + return data + + def get_ping_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Retrieves the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_ping_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_ping_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/ping', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_ping_async(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Retrieves the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_ping_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/ping', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_ping_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = [] # noqa: E501 + self._check_operation_params('get_ping', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + return local_var_params, path_params, query_params, header_params, body_params + + def head_ping(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Returns the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.head_ping(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.head_ping_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.head_ping_with_http_info(**kwargs) # noqa: E501 + return data + + def head_ping_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Returns the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.head_ping_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._head_ping_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/ping', 'HEAD', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def head_ping_async(self, **kwargs): # noqa: E501,D401,D403 + """Get the status of the instance. + + Returns the status and InfluxDB version of the instance. Use this endpoint to monitor uptime for the InfluxDB instance. The response returns a HTTP `204` status code to inform you the instance is available. #### InfluxDB Cloud - Isn't versioned and doesn't return `X-Influxdb-Version` in the headers. #### Related guides - [Influx ping](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/ping/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._head_ping_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/ping', 'HEAD', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _head_ping_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = [] # noqa: E501 + self._check_operation_params('head_ping', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/query_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/query_service.py new file mode 100644 index 0000000..0be8122 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/query_service.py @@ -0,0 +1,646 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class QueryService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """QueryService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_query_suggestions(self, **kwargs): # noqa: E501,D401,D403 + """List Flux query suggestions. + + Lists Flux query suggestions. Each suggestion contains a [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name and parameters. Use this endpoint to retrieve a list of Flux query suggestions used in the InfluxDB Flux Query Builder. #### Limitations - When writing a query, avoid using `_functionName()` helper functions exposed by this endpoint. Helper function names have an underscore (`_`) prefix and aren't meant to be used directly in queries--for example: - To sort on a column and keep the top n records, use the `top(n, columns=["_value"], tables=<-)` function instead of the `_sortLimit` helper function. `top` uses `_sortLimit`. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_query_suggestions(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: FluxSuggestions + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_query_suggestions_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_query_suggestions_with_http_info(**kwargs) # noqa: E501 + return data + + def get_query_suggestions_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List Flux query suggestions. + + Lists Flux query suggestions. Each suggestion contains a [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name and parameters. Use this endpoint to retrieve a list of Flux query suggestions used in the InfluxDB Flux Query Builder. #### Limitations - When writing a query, avoid using `_functionName()` helper functions exposed by this endpoint. Helper function names have an underscore (`_`) prefix and aren't meant to be used directly in queries--for example: - To sort on a column and keep the top n records, use the `top(n, columns=["_value"], tables=<-)` function instead of the `_sortLimit` helper function. `top` uses `_sortLimit`. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_query_suggestions_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: FluxSuggestions + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_query_suggestions_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/query/suggestions', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxSuggestions', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_query_suggestions_async(self, **kwargs): # noqa: E501,D401,D403 + """List Flux query suggestions. + + Lists Flux query suggestions. Each suggestion contains a [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name and parameters. Use this endpoint to retrieve a list of Flux query suggestions used in the InfluxDB Flux Query Builder. #### Limitations - When writing a query, avoid using `_functionName()` helper functions exposed by this endpoint. Helper function names have an underscore (`_`) prefix and aren't meant to be used directly in queries--for example: - To sort on a column and keep the top n records, use the `top(n, columns=["_value"], tables=<-)` function instead of the `_sortLimit` helper function. `top` uses `_sortLimit`. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: FluxSuggestions + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_query_suggestions_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/query/suggestions', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxSuggestions', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_query_suggestions_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_query_suggestions', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json', 'text/html']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_query_suggestions_name(self, name, **kwargs): # noqa: E501,D401,D403 + """Retrieve a query suggestion for a branching suggestion. + + Retrieves a query suggestion that contains the name and parameters of the requested function. 
Use this endpoint to pass a branching suggestion (a Flux function name) and retrieve the parameters of the requested function. #### Limitations - Use `/api/v2/query/suggestions/{name}` (without a trailing slash). `/api/v2/query/suggestions/{name}/` (note the trailing slash) results in an HTTP `301 Moved Permanently` status. - The function `name` must exist and must be spelled correctly. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/)
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.get_query_suggestions_name(name, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str name: A [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: FluxSuggestion
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async_req'):
+ return self.get_query_suggestions_name_with_http_info(name, **kwargs) # noqa: E501
+ else:
+ (data) = self.get_query_suggestions_name_with_http_info(name, **kwargs) # noqa: E501
+ return data
+
+ def get_query_suggestions_name_with_http_info(self, name, **kwargs): # noqa: E501,D401,D403
+ """Retrieve a query suggestion for a branching suggestion.
+
+ Retrieves a query suggestion that contains the name and parameters of the requested function. Use this endpoint to pass a branching suggestion (a Flux function name) and retrieve the parameters of the requested function. #### Limitations - Use `/api/v2/query/suggestions/{name}` (without a trailing slash). `/api/v2/query/suggestions/{name}/` (note the trailing slash) results in an HTTP `301 Moved Permanently` status. - The function `name` must exist and must be spelled correctly. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/)
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.get_query_suggestions_name_with_http_info(name, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str name: A [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: FluxSuggestion
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ local_var_params, path_params, query_params, header_params, body_params = \
+ self._get_query_suggestions_name_prepare(name, **kwargs) # noqa: E501
+
+ return self.api_client.call_api(
+ '/api/v2/query/suggestions/{name}', 'GET',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=[],
+ files={},
+ response_type='FluxSuggestion', # noqa: E501
+ auth_settings=[],
+ async_req=local_var_params.get('async_req'),
+ _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
+ _preload_content=local_var_params.get('_preload_content', True),
+ _request_timeout=local_var_params.get('_request_timeout'),
+ collection_formats={},
+ urlopen_kw=kwargs.get('urlopen_kw', None))
+
+ async def get_query_suggestions_name_async(self, name, **kwargs): # noqa: E501,D401,D403
+ """Retrieve a query suggestion for a branching suggestion.
+
+ Retrieves a query suggestion that contains the name and parameters of the requested function. Use this endpoint to pass a branching suggestion (a Flux function name) and retrieve the parameters of the requested function. #### Limitations - Use `/api/v2/query/suggestions/{name}` (without a trailing slash). `/api/v2/query/suggestions/{name}/` (note the trailing slash) results in an HTTP `301 Moved Permanently` status. - The function `name` must exist and must be spelled correctly. #### Related Guides - [List of all Flux functions](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/)
+ This method makes an asynchronous HTTP request.
+
+ :param async_req bool
+ :param str name: A [Flux function](https://docs.influxdata.com/flux/v0.x/stdlib/all-functions/) name. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: FluxSuggestion
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ local_var_params, path_params, query_params, header_params, body_params = \
+ self._get_query_suggestions_name_prepare(name, **kwargs) # noqa: E501
+
+ return await self.api_client.call_api(
+ '/api/v2/query/suggestions/{name}', 'GET',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=[],
+ files={},
+ response_type='FluxSuggestion', # noqa: E501
+ auth_settings=[],
+ async_req=local_var_params.get('async_req'),
+ _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
+ _preload_content=local_var_params.get('_preload_content', True),
+ _request_timeout=local_var_params.get('_request_timeout'),
+ collection_formats={},
+ urlopen_kw=kwargs.get('urlopen_kw', None))
+
+ def _get_query_suggestions_name_prepare(self, name, **kwargs): # noqa: E501,D401,D403
+ local_var_params = locals()
+
+ all_params = ['name', 'zap_trace_span'] # noqa: E501
+ self._check_operation_params('get_query_suggestions_name', all_params, local_var_params)
+ # verify the required parameter 'name' is set
+ if ('name' not in local_var_params or
+ local_var_params['name'] is None):
+ raise ValueError("Missing the required parameter `name` when calling `get_query_suggestions_name`") # noqa: E501
+
+ path_params = {}
+ if 'name' in local_var_params:
+ path_params['name'] = local_var_params['name'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+ if 'zap_trace_span' in local_var_params:
+ header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ return local_var_params, path_params, query_params, header_params, body_params
+
+ def post_query(self, **kwargs): # noqa: E501,D401,D403
+ """Query data.
+
+ Retrieves data from buckets. Use this endpoint to send a Flux query request and retrieve data from a bucket. #### Rate limits (with InfluxDB Cloud) `read` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Query with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/query-data/execute-queries/influx-api/) - [Get started with Flux](https://docs.influxdata.com/flux/v0.x/get-started/)
+ This method makes a synchronous HTTP request by default.
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_query(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: The content encoding (usually a compression algorithm) that the client can understand. + :param str content_type: + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param Query query: Flux query or specification to execute + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_query_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.post_query_with_http_info(**kwargs) # noqa: E501 + return data + + def post_query_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Query data. + + Retrieves data from buckets. Use this endpoint to send a Flux query request and retrieve data from a bucket. #### Rate limits (with InfluxDB Cloud) `read` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Query with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/query-data/execute-queries/influx-api/) - [Get started with Flux](https://docs.influxdata.com/flux/v0.x/get-started/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_query_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: The content encoding (usually a compression algorithm) that the client can understand. + :param str content_type: + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param Query query: Flux query or specification to execute + :return: str + If the method is called asynchronously, + returns the request thread. 
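+
+ A hypothetical sketch (`api_client`, the org name, and the bucket are
+ placeholders; a dict stands in for the `Query` model):
+ >>> service = QueryService(api_client)
+ >>> csv = service.post_query(
+ ... org="my-org",
+ ... query={"query": 'from(bucket: "my-bucket") |> range(start: -1h)', "type": "flux"})
+ >>> csv.splitlines()[0] # first row of the annotated-CSV result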
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/query', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_query_async(self, **kwargs): # noqa: E501,D401,D403 + """Query data. + + Retrieves data from buckets. Use this endpoint to send a Flux query request and retrieve data from a bucket. #### Rate limits (with InfluxDB Cloud) `read` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Query with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/query-data/execute-queries/influx-api/) - [Get started with Flux](https://docs.influxdata.com/flux/v0.x/get-started/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str accept_encoding: The content encoding (usually a compression algorithm) that the client can understand. + :param str content_type: + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Queries the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or `orgID` parameter. - Queries the bucket in the specified organization. + :param Query query: Flux query or specification to execute + :return: str + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/query', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_query_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'accept_encoding', 'content_type', 'org', 'org_id', 'query'] # noqa: E501 + self._check_operation_params('post_query', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'accept_encoding' in local_var_params: + header_params['Accept-Encoding'] = local_var_params['accept_encoding'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'query' in local_var_params: + body_params = local_var_params['query'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/csv', 'application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json', 'application/vnd.flux']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_query_analyze(self, **kwargs): # noqa: E501,D401,D403 + r"""Analyze a Flux query. + + Analyzes a [Flux query](https://docs.influxdata.com/flux/v0.x/) for syntax errors and returns the list of errors. In the following sample query, `from()` is missing the property key. ```json { "query": "from(: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an `errors` list that contains an error object for the missing key. #### Limitations - The endpoint doesn't validate values in the query--for example: - The following sample query has correct syntax, but contains an incorrect `from()` property key: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an empty `errors` list. + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_query_analyze(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :param Query query: Flux query to analyze + :return: AnalyzeQueryResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_query_analyze_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.post_query_analyze_with_http_info(**kwargs) # noqa: E501 + return data + + def post_query_analyze_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + r"""Analyze a Flux query. + + Analyzes a [Flux query](https://docs.influxdata.com/flux/v0.x/) for syntax errors and returns the list of errors. In the following sample query, `from()` is missing the property key. ```json { "query": "from(: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an `errors` list that contains an error object for the missing key. #### Limitations - The endpoint doesn't validate values in the query--for example: - The following sample query has correct syntax, but contains an incorrect `from()` property key: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an empty `errors` list. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_query_analyze_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :param Query query: Flux query to analyze + :return: AnalyzeQueryResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_analyze_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/query/analyze', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='AnalyzeQueryResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_query_analyze_async(self, **kwargs): # noqa: E501,D401,D403 + r"""Analyze a Flux query. + + Analyzes a [Flux query](https://docs.influxdata.com/flux/v0.x/) for syntax errors and returns the list of errors. In the following sample query, `from()` is missing the property key. ```json { "query": "from(: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an `errors` list that contains an error object for the missing key. 
#### Limitations - The endpoint doesn't validate values in the query--for example: - The following sample query has correct syntax, but contains an incorrect `from()` property key: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")", "type": "flux" } ``` If you pass this in a request to the `/api/v2/analyze` endpoint, InfluxDB returns an empty `errors` list. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :param Query query: Flux query to analyze + :return: AnalyzeQueryResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_analyze_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/query/analyze', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='AnalyzeQueryResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_query_analyze_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'content_type', 'query'] # noqa: E501 + self._check_operation_params('post_query_analyze', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'query' in local_var_params: + body_params = local_var_params['query'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_query_ast(self, **kwargs): # noqa: E501,D401,D403 + r"""Generate a query Abstract Syntax Tree (AST). + + Analyzes a Flux query and returns a complete package source [Abstract Syntax Tree (AST)](https://docs.influxdata.com/influxdb/latest/reference/glossary/#abstract-syntax-tree-ast) for the query. Use this endpoint for deep query analysis such as debugging unexpected query results. A Flux query AST provides a semantic, tree-like representation with contextual information about the query. The AST illustrates how the query is distributed into different components for execution. 
#### Limitations - The endpoint doesn't validate values in the query--for example: The following sample Flux query has correct syntax, but contains an incorrect `from()` property key: ```js from(foo: "iot_center") |> range(start: -90d) |> filter(fn: (r) => r._measurement == "environment") ``` The following code sample shows how to pass the query as JSON in the request body: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")" } ``` Passing this to `/api/v2/query/ast` will return a successful response with a generated AST.
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.post_query_ast(async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str zap_trace_span: OpenTracing span context
+ :param str content_type:
+ :param LanguageRequest language_request: The Flux query to analyze.
+ :return: ASTResponse
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async_req'):
+ return self.post_query_ast_with_http_info(**kwargs) # noqa: E501
+ else:
+ (data) = self.post_query_ast_with_http_info(**kwargs) # noqa: E501
+ return data
+
+ def post_query_ast_with_http_info(self, **kwargs): # noqa: E501,D401,D403
+ r"""Generate a query Abstract Syntax Tree (AST).
+
+ Analyzes a Flux query and returns a complete package source [Abstract Syntax Tree (AST)](https://docs.influxdata.com/influxdb/latest/reference/glossary/#abstract-syntax-tree-ast) for the query. Use this endpoint for deep query analysis such as debugging unexpected query results. A Flux query AST provides a semantic, tree-like representation with contextual information about the query. The AST illustrates how the query is distributed into different components for execution. #### Limitations - The endpoint doesn't validate values in the query--for example: The following sample Flux query has correct syntax, but contains an incorrect `from()` property key: ```js from(foo: "iot_center") |> range(start: -90d) |> filter(fn: (r) => r._measurement == "environment") ``` The following code sample shows how to pass the query as JSON in the request body: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")" } ``` Passing this to `/api/v2/query/ast` will return a successful response with a generated AST.
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.post_query_ast_with_http_info(async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str zap_trace_span: OpenTracing span context
+ :param str content_type:
+ :param LanguageRequest language_request: The Flux query to analyze.
+ :return: ASTResponse
+ If the method is called asynchronously,
+ returns the request thread.
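+
+ A hypothetical sketch (`api_client` is assumed; a dict stands in for the
+ `LanguageRequest` model, and the attribute path on the response is an
+ assumed reading of the AST schema):
+ >>> service = QueryService(api_client)
+ >>> resp = service.post_query_ast(
+ ... language_request={"query": 'from(bucket: "b") |> range(start: -1h)'})
+ >>> resp.ast.files[0].body # top-level statements of the parsed package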
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_ast_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/query/ast', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ASTResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_query_ast_async(self, **kwargs): # noqa: E501,D401,D403 + r"""Generate a query Abstract Syntax Tree (AST). + + Analyzes a Flux query and returns a complete package source [Abstract Syntax Tree (AST)](https://docs.influxdata.com/influxdb/latest/reference/glossary/#abstract-syntax-tree-ast) for the query. Use this endpoint for deep query analysis such as debugging unexpected query results. A Flux query AST provides a semantic, tree-like representation with contextual information about the query. The AST illustrates how the query is distributed into different components for execution. #### Limitations - The endpoint doesn't validate values in the query--for example: The following sample Flux query has correct syntax, but contains an incorrect `from()` property key: ```js from(foo: "iot_center") |> range(start: -90d) |> filter(fn: (r) => r._measurement == "environment") ``` The following sample JSON shows how to pass the query in the request body: ```js from(foo: "iot_center") |> range(start: -90d) |> filter(fn: (r) => r._measurement == "environment") ``` The following code sample shows how to pass the query as JSON in the request body: ```json { "query": "from(foo: \\"iot_center\\")\\ |> range(start: -90d)\\ |> filter(fn: (r) => r._measurement == \\"environment\\")" } ``` Passing this to `/api/v2/query/ast` will return a successful response with a generated AST. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :param LanguageRequest language_request: The Flux query to analyze. + :return: ASTResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_query_ast_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/query/ast', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ASTResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_query_ast_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'content_type', 'language_request'] # noqa: E501 + self._check_operation_params('post_query_ast', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'language_request' in local_var_params: + body_params = local_var_params['language_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/ready_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/ready_service.py new file mode 100644 index 0000000..407435b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/ready_service.py @@ -0,0 +1,137 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ReadyService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ReadyService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_ready(self, **kwargs): # noqa: E501,D401,D403 + """Get the readiness of an instance at startup. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_ready(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Ready + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_ready_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_ready_with_http_info(**kwargs) # noqa: E501 + return data + + def get_ready_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Get the readiness of an instance at startup. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_ready_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Ready + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_ready_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/ready', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Ready', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_ready_async(self, **kwargs): # noqa: E501,D401,D403 + """Get the readiness of an instance at startup. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Ready + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_ready_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/ready', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Ready', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_ready_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_ready', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/remote_connections_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/remote_connections_service.py new file mode 100644 index 0000000..7c59107 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/remote_connections_service.py @@ -0,0 +1,632 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. 
+
+The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import re # noqa: F401
+
+from influxdb_client.service._base_service import _BaseService
+
+
+class RemoteConnectionsService(_BaseService):
+ """NOTE: This class is auto generated by OpenAPI Generator.
+
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None): # noqa: E501,D401,D403
+ """RemoteConnectionsService - an operation defined in OpenAPI."""
+ super().__init__(api_client)
+
+ def delete_remote_connection_by_id(self, remote_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a remote connection.
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.delete_remote_connection_by_id(remote_id, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str remote_id: (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async_req'):
+ return self.delete_remote_connection_by_id_with_http_info(remote_id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_remote_connection_by_id_with_http_info(remote_id, **kwargs) # noqa: E501
+ return data
+
+ def delete_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a remote connection.
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.delete_remote_connection_by_id_with_http_info(remote_id, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str remote_id: (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ local_var_params, path_params, query_params, header_params, body_params = \
+ self._delete_remote_connection_by_id_prepare(remote_id, **kwargs) # noqa: E501
+
+ return self.api_client.call_api(
+ '/api/v2/remotes/{remoteID}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=[],
+ files={},
+ response_type=None, # noqa: E501
+ auth_settings=[],
+ async_req=local_var_params.get('async_req'),
+ _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
+ _preload_content=local_var_params.get('_preload_content', True),
+ _request_timeout=local_var_params.get('_request_timeout'),
+ collection_formats={},
+ urlopen_kw=kwargs.get('urlopen_kw', None))
+
+ async def delete_remote_connection_by_id_async(self, remote_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a remote connection.
+
+ This method makes an asynchronous HTTP request.
+
+ :param async_req bool
+ :param str remote_id: (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
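+
+ A hypothetical sketch, assuming an asynchronous-capable `api_client` and a
+ placeholder remote ID:
+ >>> service = RemoteConnectionsService(api_client)
+ >>> await service.delete_remote_connection_by_id_async("0123456789abcdef")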
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_remote_connection_by_id_prepare(remote_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/remotes/{remoteID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_remote_connection_by_id_prepare(self, remote_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['remote_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_remote_connection_by_id', all_params, local_var_params) + # verify the required parameter 'remote_id' is set + if ('remote_id' not in local_var_params or + local_var_params['remote_id'] is None): + raise ValueError("Missing the required parameter `remote_id` when calling `delete_remote_connection_by_id`") # noqa: E501 + + path_params = {} + if 'remote_id' in local_var_params: + path_params['remoteID'] = local_var_params['remote_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_remote_connection_by_id(self, remote_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a remote connection. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_remote_connection_by_id(remote_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str remote_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_remote_connection_by_id_with_http_info(remote_id, **kwargs) # noqa: E501 + else: + (data) = self.get_remote_connection_by_id_with_http_info(remote_id, **kwargs) # noqa: E501 + return data + + def get_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a remote connection. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_remote_connection_by_id_with_http_info(remote_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str remote_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_remote_connection_by_id_prepare(remote_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/remotes/{remoteID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_remote_connection_by_id_async(self, remote_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a remote connection. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str remote_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_remote_connection_by_id_prepare(remote_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/remotes/{remoteID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_remote_connection_by_id_prepare(self, remote_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['remote_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_remote_connection_by_id', all_params, local_var_params) + # verify the required parameter 'remote_id' is set + if ('remote_id' not in local_var_params or + local_var_params['remote_id'] is None): + raise ValueError("Missing the required parameter `remote_id` when calling `get_remote_connection_by_id`") # noqa: E501 + + path_params = {} + if 'remote_id' in local_var_params: + path_params['remoteID'] = local_var_params['remote_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_remote_connections(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all remote connections. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_remote_connections(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_url: + :return: RemoteConnections + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_remote_connections_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_remote_connections_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_remote_connections_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all remote connections. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_remote_connections_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_url: + :return: RemoteConnections + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_remote_connections_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/remotes', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnections', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_remote_connections_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all remote connections. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_url: + :return: RemoteConnections + If the method is called asynchronously, + returns the request thread. 
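+
+ A hypothetical sketch, assuming an asynchronous-capable `api_client`; the
+ org ID is a placeholder and `remotes` is an assumed reading of the
+ `RemoteConnections` response shape:
+ >>> service = RemoteConnectionsService(api_client)
+ >>> listing = await service.get_remote_connections_async("0123456789abcdef")
+ >>> [r.name for r in listing.remotes] # names of configured connections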
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_remote_connections_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/remotes', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnections', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_remote_connections_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span', 'name', 'remote_url'] # noqa: E501 + self._check_operation_params('get_remote_connections', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_remote_connections`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'remote_url' in local_var_params: + query_params.append(('remoteURL', local_var_params['remote_url'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_remote_connection_by_id(self, remote_id, remote_connection_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a remote connection. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_remote_connection_by_id(remote_id, remote_connection_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str remote_id: (required) + :param RemoteConnectionUpdateRequest remote_connection_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_remote_connection_by_id_with_http_info(remote_id, remote_connection_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_remote_connection_by_id_with_http_info(remote_id, remote_connection_update_request, **kwargs) # noqa: E501 + return data + + def patch_remote_connection_by_id_with_http_info(self, remote_id, remote_connection_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a remote connection. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_remote_connection_by_id_with_http_info(remote_id, remote_connection_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str remote_id: (required) + :param RemoteConnectionUpdateRequest remote_connection_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_remote_connection_by_id_prepare(remote_id, remote_connection_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/remotes/{remoteID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_remote_connection_by_id_async(self, remote_id, remote_connection_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a remote connection. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str remote_id: (required) + :param RemoteConnectionUpdateRequest remote_connection_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. 
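+
+        Example (an illustrative sketch, not part of the generated code):
+        assumes `api` and `remote_id` as above;
+        `RemoteConnectionUpdateRequest` is the generated request model,
+        and `description` is one of its optional fields per the API
+        schema.
+
+        >>> from influxdb_client import RemoteConnectionUpdateRequest
+        >>> update = RemoteConnectionUpdateRequest(description="nightly sync")
+        >>> updated = await api.patch_remote_connection_by_id_async(remote_id, update)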
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_remote_connection_by_id_prepare(remote_id, remote_connection_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/remotes/{remoteID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_remote_connection_by_id_prepare(self, remote_id, remote_connection_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['remote_id', 'remote_connection_update_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_remote_connection_by_id', all_params, local_var_params) + # verify the required parameter 'remote_id' is set + if ('remote_id' not in local_var_params or + local_var_params['remote_id'] is None): + raise ValueError("Missing the required parameter `remote_id` when calling `patch_remote_connection_by_id`") # noqa: E501 + # verify the required parameter 'remote_connection_update_request' is set + if ('remote_connection_update_request' not in local_var_params or + local_var_params['remote_connection_update_request'] is None): + raise ValueError("Missing the required parameter `remote_connection_update_request` when calling `patch_remote_connection_by_id`") # noqa: E501 + + path_params = {} + if 'remote_id' in local_var_params: + path_params['remoteID'] = local_var_params['remote_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'remote_connection_update_request' in local_var_params: + body_params = local_var_params['remote_connection_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_remote_connection(self, remote_connection_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new remote connection. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_remote_connection(remote_connection_creation_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param RemoteConnectionCreationRequest remote_connection_creation_request: (required) + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_remote_connection_with_http_info(remote_connection_creation_request, **kwargs) # noqa: E501 + else: + (data) = self.post_remote_connection_with_http_info(remote_connection_creation_request, **kwargs) # noqa: E501 + return data + + def post_remote_connection_with_http_info(self, remote_connection_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new remote connection. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_remote_connection_with_http_info(remote_connection_creation_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param RemoteConnectionCreationRequest remote_connection_creation_request: (required) + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_remote_connection_prepare(remote_connection_creation_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/remotes', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_remote_connection_async(self, remote_connection_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new remote connection. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param RemoteConnectionCreationRequest remote_connection_creation_request: (required) + :return: RemoteConnection + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_remote_connection_prepare(remote_connection_creation_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/remotes', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RemoteConnection', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_remote_connection_prepare(self, remote_connection_creation_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['remote_connection_creation_request'] # noqa: E501 + self._check_operation_params('post_remote_connection', all_params, local_var_params) + # verify the required parameter 'remote_connection_creation_request' is set + if ('remote_connection_creation_request' not in local_var_params or + local_var_params['remote_connection_creation_request'] is None): + raise ValueError("Missing the required parameter `remote_connection_creation_request` when calling `post_remote_connection`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'remote_connection_creation_request' in local_var_params: + body_params = local_var_params['remote_connection_creation_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/replications_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/replications_service.py new file mode 100644 index 0000000..2130490 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/replications_service.py @@ -0,0 +1,768 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ReplicationsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ReplicationsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_replication_by_id(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Delete a replication. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_replication_by_id(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + return data + + def delete_replication_by_id_with_http_info(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Delete a replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_replication_by_id_with_http_info(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_replication_by_id_async(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Delete a replication. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
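+
+        Example (an illustrative sketch, not part of the generated code):
+        assumes `api` is an instance of this service and `replication_id`
+        identifies an existing replication; the call returns None on
+        success.
+
+        >>> await api.delete_replication_by_id_async(replication_id)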
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_replication_by_id_prepare(self, replication_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['replication_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_replication_by_id', all_params, local_var_params) + # verify the required parameter 'replication_id' is set + if ('replication_id' not in local_var_params or + local_var_params['replication_id'] is None): + raise ValueError("Missing the required parameter `replication_id` when calling `delete_replication_by_id`") # noqa: E501 + + path_params = {} + if 'replication_id' in local_var_params: + path_params['replicationID'] = local_var_params['replication_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_replication_by_id(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_replication_by_id(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: Replication + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + else: + (data) = self.get_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + return data + + def get_replication_by_id_with_http_info(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_replication_by_id_with_http_info(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: Replication + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_replication_by_id_async(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a replication. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: Replication + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_replication_by_id_prepare(self, replication_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['replication_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_replication_by_id', all_params, local_var_params) + # verify the required parameter 'replication_id' is set + if ('replication_id' not in local_var_params or + local_var_params['replication_id'] is None): + raise ValueError("Missing the required parameter `replication_id` when calling `get_replication_by_id`") # noqa: E501 + + path_params = {} + if 'replication_id' in local_var_params: + path_params['replicationID'] = local_var_params['replication_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_replications(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all replications. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_replications(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_id: + :param str local_bucket_id: + :return: Replications + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_replications_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_replications_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_replications_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all replications. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_replications_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_id: + :param str local_bucket_id: + :return: Replications + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_replications_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replications', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_replications_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all replications. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str name: + :param str remote_id: + :param str local_bucket_id: + :return: Replications + If the method is called asynchronously, + returns the request thread. 
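+
+        Example (an illustrative sketch, not part of the generated code):
+        assumes `api` and `org_id` as in the synchronous example; the
+        optional keyword filters mirror the `name`, `remote_id`, and
+        `local_bucket_id` parameters above, and the `replications`
+        attribute follows the generated `Replications` model.
+
+        >>> replications = await api.get_replications_async(
+        ...     org_id, remote_id=remote_id)
+        >>> names = [r.name for r in replications.replications]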
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_replications_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replications', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_replications_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span', 'name', 'remote_id', 'local_bucket_id'] # noqa: E501 + self._check_operation_params('get_replications', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_replications`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'remote_id' in local_var_params: + query_params.append(('remoteID', local_var_params['remote_id'])) # noqa: E501 + if 'local_bucket_id' in local_var_params: + query_params.append(('localBucketID', local_var_params['local_bucket_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_replication_by_id(self, replication_id, replication_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_replication_by_id(replication_id, replication_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param ReplicationUpdateRequest replication_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the updated information, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_replication_by_id_with_http_info(replication_id, replication_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_replication_by_id_with_http_info(replication_id, replication_update_request, **kwargs) # noqa: E501 + return data + + def patch_replication_by_id_with_http_info(self, replication_id, replication_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a replication. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_replication_by_id_with_http_info(replication_id, replication_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param ReplicationUpdateRequest replication_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the updated information, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_replication_by_id_prepare(replication_id, replication_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_replication_by_id_async(self, replication_id, replication_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a replication. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str replication_id: (required) + :param ReplicationUpdateRequest replication_update_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the updated information, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. 
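+
+        Example (an illustrative sketch, not part of the generated code):
+        assumes `api` and `replication_id` as above;
+        `max_queue_size_bytes` is one of the optional fields on the
+        generated `ReplicationUpdateRequest` model, and `validate=True`
+        dry-runs the change as described above.
+
+        >>> from influxdb_client import ReplicationUpdateRequest
+        >>> update = ReplicationUpdateRequest(max_queue_size_bytes=104857600)
+        >>> checked = await api.patch_replication_by_id_async(
+        ...     replication_id, update, validate=True)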
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_replication_by_id_prepare(replication_id, replication_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications/{replicationID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_replication_by_id_prepare(self, replication_id, replication_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['replication_id', 'replication_update_request', 'zap_trace_span', 'validate'] # noqa: E501 + self._check_operation_params('patch_replication_by_id', all_params, local_var_params) + # verify the required parameter 'replication_id' is set + if ('replication_id' not in local_var_params or + local_var_params['replication_id'] is None): + raise ValueError("Missing the required parameter `replication_id` when calling `patch_replication_by_id`") # noqa: E501 + # verify the required parameter 'replication_update_request' is set + if ('replication_update_request' not in local_var_params or + local_var_params['replication_update_request'] is None): + raise ValueError("Missing the required parameter `replication_update_request` when calling `patch_replication_by_id`") # noqa: E501 + + path_params = {} + if 'replication_id' in local_var_params: + path_params['replicationID'] = local_var_params['replication_id'] # noqa: E501 + + query_params = [] + if 'validate' in local_var_params: + query_params.append(('validate', local_var_params['validate'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'replication_update_request' in local_var_params: + body_params = local_var_params['replication_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_replication(self, replication_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_replication(replication_creation_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ReplicationCreationRequest replication_creation_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the replication, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_replication_with_http_info(replication_creation_request, **kwargs) # noqa: E501 + else: + (data) = self.post_replication_with_http_info(replication_creation_request, **kwargs) # noqa: E501 + return data + + def post_replication_with_http_info(self, replication_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_replication_with_http_info(replication_creation_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ReplicationCreationRequest replication_creation_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the replication, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_replication_prepare(replication_creation_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_replication_async(self, replication_creation_request, **kwargs): # noqa: E501,D401,D403 + """Register a new replication. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param ReplicationCreationRequest replication_creation_request: (required) + :param str zap_trace_span: OpenTracing span context + :param bool validate: If true, validate the replication, but don't save it. + :return: Replication + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_replication_prepare(replication_creation_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Replication', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_replication_prepare(self, replication_creation_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['replication_creation_request', 'zap_trace_span', 'validate'] # noqa: E501 + self._check_operation_params('post_replication', all_params, local_var_params) + # verify the required parameter 'replication_creation_request' is set + if ('replication_creation_request' not in local_var_params or + local_var_params['replication_creation_request'] is None): + raise ValueError("Missing the required parameter `replication_creation_request` when calling `post_replication`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'validate' in local_var_params: + query_params.append(('validate', local_var_params['validate'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'replication_creation_request' in local_var_params: + body_params = local_var_params['replication_creation_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_validate_replication_by_id(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Validate a replication. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_validate_replication_by_id(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_validate_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + else: + (data) = self.post_validate_replication_by_id_with_http_info(replication_id, **kwargs) # noqa: E501 + return data + + def post_validate_replication_by_id_with_http_info(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Validate a replication. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_validate_replication_by_id_with_http_info(replication_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_validate_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/replications/{replicationID}/validate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_validate_replication_by_id_async(self, replication_id, **kwargs): # noqa: E501,D401,D403 + """Validate a replication. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str replication_id: (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_validate_replication_by_id_prepare(replication_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/replications/{replicationID}/validate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_validate_replication_by_id_prepare(self, replication_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['replication_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_validate_replication_by_id', all_params, local_var_params) + # verify the required parameter 'replication_id' is set + if ('replication_id' not in local_var_params or + local_var_params['replication_id'] is None): + raise ValueError("Missing the required parameter `replication_id` when calling `post_validate_replication_by_id`") # noqa: E501 + + path_params = {} + if 'replication_id' in local_var_params: + path_params['replicationID'] = local_var_params['replication_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git 
a/myenv/lib/python3.12/site-packages/influxdb_client/service/resources_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/resources_service.py
new file mode 100644
index 0000000..d86cfe3
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/resources_service.py
@@ -0,0 +1,137 @@
+# coding: utf-8
+
+"""
+InfluxDB OSS API Service.
+
+The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint.  # noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import re  # noqa: F401
+
+from influxdb_client.service._base_service import _BaseService
+
+
+class ResourcesService(_BaseService):
+    """NOTE: This class is auto generated by OpenAPI Generator.
+
+    Ref: https://openapi-generator.tech
+
+    Do not edit the class manually.
+    """
+
+    def __init__(self, api_client=None):  # noqa: E501,D401,D403
+        """ResourcesService - an operation defined in OpenAPI."""
+        super().__init__(api_client)
+
+    def get_resources(self, **kwargs):  # noqa: E501,D401,D403
+        """List all known resources.
+
+        This method makes a synchronous HTTP request by default. To make an
+        asynchronous HTTP request, please pass async_req=True
+        >>> thread = api.get_resources(async_req=True)
+        >>> result = thread.get()
+
+        :param async_req bool
+        :param str zap_trace_span: OpenTracing span context
+        :return: list[str]
+                 If the method is called asynchronously,
+                 returns the request thread.
+        """  # noqa: E501
+        kwargs['_return_http_data_only'] = True
+        if kwargs.get('async_req'):
+            return self.get_resources_with_http_info(**kwargs)  # noqa: E501
+        else:
+            (data) = self.get_resources_with_http_info(**kwargs)  # noqa: E501
+            return data
+
+    def get_resources_with_http_info(self, **kwargs):  # noqa: E501,D401,D403
+        """List all known resources.
+
+        This method makes a synchronous HTTP request by default. To make an
+        asynchronous HTTP request, please pass async_req=True
+        >>> thread = api.get_resources_with_http_info(async_req=True)
+        >>> result = thread.get()
+
+        :param async_req bool
+        :param str zap_trace_span: OpenTracing span context
+        :return: list[str]
+                 If the method is called asynchronously,
+                 returns the request thread.
+        """  # noqa: E501
+        local_var_params, path_params, query_params, header_params, body_params = \
+            self._get_resources_prepare(**kwargs)  # noqa: E501
+
+        return self.api_client.call_api(
+            '/api/v2/resources', 'GET',
+            path_params,
+            query_params,
+            header_params,
+            body=body_params,
+            post_params=[],
+            files={},
+            response_type='list[str]',  # noqa: E501
+            auth_settings=[],
+            async_req=local_var_params.get('async_req'),
+            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
+            _preload_content=local_var_params.get('_preload_content', True),
+            _request_timeout=local_var_params.get('_request_timeout'),
+            collection_formats={},
+            urlopen_kw=kwargs.get('urlopen_kw', None))
+
+    async def get_resources_async(self, **kwargs):  # noqa: E501,D401,D403
+        """List all known resources.
+
+        This method makes an asynchronous HTTP request.
+
+        :param async_req bool
+        :param str zap_trace_span: OpenTracing span context
+        :return: list[str]
+                 If the method is called asynchronously,
+                 returns the request thread.
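+
+        Example (an illustrative sketch, not part of the generated code):
+        assumes `api` is an instance of this service; the response is a
+        plain list of resource-type strings.
+
+        >>> resources = await api.get_resources_async()
+        >>> print(sorted(resources))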
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_resources_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/resources', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='list[str]', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_resources_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_resources', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/restore_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/restore_service.py new file mode 100644 index 0000000..6275ed0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/restore_service.py @@ -0,0 +1,683 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class RestoreService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """RestoreService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def post_restore_bucket_id(self, bucket_id, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite storage metadata for a bucket with shard info from a backup.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_bucket_id(bucket_id, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str body: Database info serialized as protobuf. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :return: str + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_restore_bucket_id_with_http_info(bucket_id, body, **kwargs) # noqa: E501 + else: + (data) = self.post_restore_bucket_id_with_http_info(bucket_id, body, **kwargs) # noqa: E501 + return data + + def post_restore_bucket_id_with_http_info(self, bucket_id, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite storage metadata for a bucket with shard info from a backup.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_bucket_id_with_http_info(bucket_id, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str body: Database info serialized as protobuf. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_bucket_id_prepare(bucket_id, body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/restore/bucket/{bucketID}', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_restore_bucket_id_async(self, bucket_id, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite storage metadata for a bucket with shard info from a backup.. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str bucket_id: The bucket ID. (required) + :param str body: Database info serialized as protobuf. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_type: + :return: str + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_bucket_id_prepare(bucket_id, body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/restore/bucket/{bucketID}', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_restore_bucket_id_prepare(self, bucket_id, body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_id', 'body', 'zap_trace_span', 'content_type'] # noqa: E501 + self._check_operation_params('post_restore_bucket_id', all_params, local_var_params) + # verify the required parameter 'bucket_id' is set + if ('bucket_id' not in local_var_params or + local_var_params['bucket_id'] is None): + raise ValueError("Missing the required parameter `bucket_id` when calling `post_restore_bucket_id`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in local_var_params or + local_var_params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `post_restore_bucket_id`") # noqa: E501 + + path_params = {} + if 'bucket_id' in local_var_params: + path_params['bucketID'] = local_var_params['bucket_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['text/plain']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_restore_bucket_metadata(self, bucket_metadata_manifest, **kwargs): # noqa: E501,D401,D403 + """Create a new bucket pre-seeded with shard info from a backup.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_bucket_metadata(bucket_metadata_manifest, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param BucketMetadataManifest bucket_metadata_manifest: Metadata manifest for a bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: RestoredBucketMappings + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_restore_bucket_metadata_with_http_info(bucket_metadata_manifest, **kwargs) # noqa: E501 + else: + (data) = self.post_restore_bucket_metadata_with_http_info(bucket_metadata_manifest, **kwargs) # noqa: E501 + return data + + def post_restore_bucket_metadata_with_http_info(self, bucket_metadata_manifest, **kwargs): # noqa: E501,D401,D403 + """Create a new bucket pre-seeded with shard info from a backup.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_bucket_metadata_with_http_info(bucket_metadata_manifest, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param BucketMetadataManifest bucket_metadata_manifest: Metadata manifest for a bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: RestoredBucketMappings + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_bucket_metadata_prepare(bucket_metadata_manifest, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/restore/bucketMetadata', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RestoredBucketMappings', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_restore_bucket_metadata_async(self, bucket_metadata_manifest, **kwargs): # noqa: E501,D401,D403 + """Create a new bucket pre-seeded with shard info from a backup.. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param BucketMetadataManifest bucket_metadata_manifest: Metadata manifest for a bucket. (required) + :param str zap_trace_span: OpenTracing span context + :return: RestoredBucketMappings + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_bucket_metadata_prepare(bucket_metadata_manifest, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/restore/bucketMetadata', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='RestoredBucketMappings', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_restore_bucket_metadata_prepare(self, bucket_metadata_manifest, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['bucket_metadata_manifest', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_restore_bucket_metadata', all_params, local_var_params) + # verify the required parameter 'bucket_metadata_manifest' is set + if ('bucket_metadata_manifest' not in local_var_params or + local_var_params['bucket_metadata_manifest'] is None): + raise ValueError("Missing the required parameter `bucket_metadata_manifest` when calling `post_restore_bucket_metadata`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'bucket_metadata_manifest' in local_var_params: + body_params = local_var_params['bucket_metadata_manifest'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_restore_kv(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded KV store on the server with a backed-up snapshot.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_kv(body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param file body: Full KV snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the line protocol in the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: PostRestoreKVResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_restore_kv_with_http_info(body, **kwargs) # noqa: E501 + else: + (data) = self.post_restore_kv_with_http_info(body, **kwargs) # noqa: E501 + return data + + def post_restore_kv_with_http_info(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded KV store on the server with a backed-up snapshot.. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_kv_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param file body: Full KV snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: PostRestoreKVResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_kv_prepare(body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/restore/kv', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='PostRestoreKVResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_restore_kv_async(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded KV store on the server with a backed-up snapshot. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param file body: Full KV snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: PostRestoreKVResponse + If the method is called asynchronously, + returns the request thread.
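+
+ Example (sketch; `kv.snapshot` is a hypothetical file written by a prior KV backup, read here as raw bytes):
+
+ >>> with open('kv.snapshot', 'rb') as f:
+ ...     response = await api.post_restore_kv_async(f.read())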
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_kv_prepare(body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/restore/kv', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='PostRestoreKVResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_restore_kv_prepare(self, body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['body', 'zap_trace_span', 'content_encoding', 'content_type'] # noqa: E501 + self._check_operation_params('post_restore_kv', all_params, local_var_params) + # verify the required parameter 'body' is set + if ('body' not in local_var_params or + local_var_params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `post_restore_kv`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_encoding' in local_var_params: + header_params['Content-Encoding'] = local_var_params['content_encoding'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['text/plain']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_restore_shard_id(self, shard_id, body, **kwargs): # noqa: E501,D401,D403 + """Restore a TSM snapshot into a shard.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_shard_id(shard_id, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str shard_id: The shard ID. (required) + :param file body: TSM snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the line protocol in the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_restore_shard_id_with_http_info(shard_id, body, **kwargs) # noqa: E501 + else: + (data) = self.post_restore_shard_id_with_http_info(shard_id, body, **kwargs) # noqa: E501 + return data + + def post_restore_shard_id_with_http_info(self, shard_id, body, **kwargs): # noqa: E501,D401,D403 + """Restore a TSM snapshot into a shard.. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_shard_id_with_http_info(shard_id, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str shard_id: The shard ID. (required) + :param file body: TSM snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_shard_id_prepare(shard_id, body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/restore/shards/{shardID}', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_restore_shard_id_async(self, shard_id, body, **kwargs): # noqa: E501,D401,D403 + """Restore a TSM snapshot into a shard. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str shard_id: The shard ID. (required) + :param file body: TSM snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread.
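+
+ Example (sketch; the shard ID and snapshot filename are placeholders):
+
+ >>> with open('shard-42.tsm', 'rb') as f:
+ ...     await api.post_restore_shard_id_async('42', f.read())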
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_shard_id_prepare(shard_id, body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/restore/shards/{shardID}', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_restore_shard_id_prepare(self, shard_id, body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['shard_id', 'body', 'zap_trace_span', 'content_encoding', 'content_type'] # noqa: E501 + self._check_operation_params('post_restore_shard_id', all_params, local_var_params) + # verify the required parameter 'shard_id' is set + if ('shard_id' not in local_var_params or + local_var_params['shard_id'] is None): + raise ValueError("Missing the required parameter `shard_id` when calling `post_restore_shard_id`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in local_var_params or + local_var_params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `post_restore_shard_id`") # noqa: E501 + + path_params = {} + if 'shard_id' in local_var_params: + path_params['shardID'] = local_var_params['shard_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_encoding' in local_var_params: + header_params['Content-Encoding'] = local_var_params['content_encoding'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['text/plain']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_restore_sql(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded SQL store on the server with a backed-up snapshot.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_sql(body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param file body: Full SQL snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the line protocol in the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_restore_sql_with_http_info(body, **kwargs) # noqa: E501 + else: + (data) = self.post_restore_sql_with_http_info(body, **kwargs) # noqa: E501 + return data + + def post_restore_sql_with_http_info(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded SQL store on the server with a backed-up snapshot.. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_restore_sql_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param file body: Full SQL snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the line protocol in the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_sql_prepare(body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/restore/sql', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_restore_sql_async(self, body, **kwargs): # noqa: E501,D401,D403 + """Overwrite the embedded SQL store on the server with a backed-up snapshot.. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param file body: Full SQL snapshot. (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The value tells InfluxDB what compression is applied to the line protocol in the request payload. To make an API request with a GZIP payload, send `Content-Encoding: gzip` as a request header. + :param str content_type: + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_restore_sql_prepare(body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/restore/sql', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_restore_sql_prepare(self, body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['body', 'zap_trace_span', 'content_encoding', 'content_type'] # noqa: E501 + self._check_operation_params('post_restore_sql', all_params, local_var_params) + # verify the required parameter 'body' is set + if ('body' not in local_var_params or + local_var_params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `post_restore_sql`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_encoding' in local_var_params: + header_params['Content-Encoding'] = local_var_params['content_encoding'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['text/plain']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/routes_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/routes_service.py new file mode 100644 index 0000000..8b79c4f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/routes_service.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class RoutesService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """RoutesService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_routes(self, **kwargs): # noqa: E501,D401,D403 + """List all top level routes. + + Retrieves all the top level routes for the InfluxDB API. 
#### Limitations - Only returns top level routes--for example, the response contains `"tasks":"/api/v2/tasks"`, and doesn't contain resource-specific routes for tasks (`/api/v2/tasks/TASK_ID/...`). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_routes(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Routes + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_routes_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_routes_with_http_info(**kwargs) # noqa: E501 + return data + + def get_routes_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all top level routes. + + Retrieves all the top level routes for the InfluxDB API. #### Limitations - Only returns top level routes--for example, the response contains `"tasks":"/api/v2/tasks"`, and doesn't contain resource-specific routes for tasks (`/api/v2/tasks/TASK_ID/...`). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_routes_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Routes + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_routes_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Routes', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_routes_async(self, **kwargs): # noqa: E501,D401,D403 + """List all top level routes. + + Retrieves all the top level routes for the InfluxDB API. #### Limitations - Only returns top level routes--for example, the response contains `"tasks":"/api/v2/tasks"`, and doesn't contain resource-specific routes for tasks (`/api/v2/tasks/TASK_ID/...`). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: Routes + If the method is called asynchronously, + returns the request thread. 
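+
+ Example (sketch; the endpoint takes no required arguments):
+
+ >>> routes = await api.get_routes_async()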
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_routes_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Routes', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_routes_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_routes', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/rules_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/rules_service.py new file mode 100644 index 0000000..38a74f4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/rules_service.py @@ -0,0 +1,146 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class RulesService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """RulesService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_notification_rules_id_query(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule query. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id_query(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_notification_rules_id_query_with_http_info(rule_id, **kwargs) # noqa: E501 + else: + (data) = self.get_notification_rules_id_query_with_http_info(rule_id, **kwargs) # noqa: E501 + return data + + def get_notification_rules_id_query_with_http_info(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule query. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_notification_rules_id_query_with_http_info(rule_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_query_prepare(rule_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/query', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_notification_rules_id_query_async(self, rule_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a notification rule query. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str rule_id: The notification rule ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: FluxResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_notification_rules_id_query_prepare(rule_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/notificationRules/{ruleID}/query', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='FluxResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_notification_rules_id_query_prepare(self, rule_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['rule_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_notification_rules_id_query', all_params, local_var_params) + # verify the required parameter 'rule_id' is set + if ('rule_id' not in local_var_params or + local_var_params['rule_id'] is None): + raise ValueError("Missing the required parameter `rule_id` when calling `get_notification_rules_id_query`") # noqa: E501 + + path_params = {} + if 'rule_id' in local_var_params: + path_params['ruleID'] = local_var_params['rule_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/scraper_targets_service.py 
b/myenv/lib/python3.12/site-packages/influxdb_client/service/scraper_targets_service.py new file mode 100644 index 0000000..144a393 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/scraper_targets_service.py @@ -0,0 +1,1748 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ScraperTargetsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ScraperTargetsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_scrapers_id(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Delete a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_scrapers_id_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_scrapers_id_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + return data + + def delete_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Delete a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_with_http_info(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_scrapers_id_async(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Delete a scraper target. + + This method makes an asynchronous HTTP request.
+ + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_scrapers_id_prepare(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_scrapers_id', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `delete_scrapers_id`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_scrapers_id_labels_id(self, scraper_target_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_labels_id(scraper_target_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_scrapers_id_labels_id_with_http_info(scraper_target_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_scrapers_id_labels_id_with_http_info(scraper_target_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_scrapers_id_labels_id_with_http_info(self, scraper_target_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a scraper target. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_labels_id_with_http_info(scraper_target_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_labels_id_prepare(scraper_target_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_scrapers_id_labels_id_async(self, scraper_target_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_labels_id_prepare(scraper_target_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_scrapers_id_labels_id_prepare(self, scraper_target_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_scrapers_id_labels_id', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `delete_scrapers_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_scrapers_id_labels_id`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = 
local_var_params['scraper_target_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_scrapers_id_members_id(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_members_id(user_id, scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of member to remove. (required) + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_scrapers_id_members_id_with_http_info(user_id, scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_scrapers_id_members_id_with_http_info(user_id, scraper_target_id, **kwargs) # noqa: E501 + return data + + def delete_scrapers_id_members_id_with_http_info(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_members_id_with_http_info(user_id, scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of member to remove. (required) + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_members_id_prepare(user_id, scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_scrapers_id_members_id_async(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of member to remove. (required) + :param str scraper_target_id: The scraper target ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_members_id_prepare(user_id, scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_scrapers_id_members_id_prepare(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_scrapers_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_scrapers_id_members_id`") # noqa: E501 + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `delete_scrapers_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_scrapers_id_owners_id(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_owners_id(user_id, scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of owner to remove. (required) + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
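+
+ Example (sketch; both IDs are placeholder values):
+
+ >>> api.delete_scrapers_id_owners_id('<userID>', '<scraperTargetID>')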
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_scrapers_id_owners_id_with_http_info(user_id, scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_scrapers_id_owners_id_with_http_info(user_id, scraper_target_id, **kwargs) # noqa: E501 + return data + + def delete_scrapers_id_owners_id_with_http_info(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_scrapers_id_owners_id_with_http_info(user_id, scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of owner to remove. (required) + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_owners_id_prepare(user_id, scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_scrapers_id_owners_id_async(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of owner to remove. (required) + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_scrapers_id_owners_id_prepare(user_id, scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_scrapers_id_owners_id_prepare(self, user_id, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_scrapers_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_scrapers_id_owners_id`") # noqa: E501 + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `delete_scrapers_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scrapers(self, **kwargs): # noqa: E501,D401,D403 + """List all scraper targets. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: Specifies the name of the scraper target. + :param list[str] id: List of scraper target IDs to return. If both `id` and `owner` are specified, only `id` is used. + :param str org_id: Specifies the organization ID of the scraper target. + :param str org: Specifies the organization name of the scraper target. + :return: ScraperTargetResponses + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scrapers_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_scrapers_with_http_info(**kwargs) # noqa: E501 + return data + + def get_scrapers_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all scraper targets. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: Specifies the name of the scraper target. + :param list[str] id: List of scraper target IDs to return. If both `id` and `owner` are specified, only `id` is used. + :param str org_id: Specifies the organization ID of the scraper target. + :param str org: Specifies the organization name of the scraper target. + :return: ScraperTargetResponses + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponses', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scrapers_async(self, **kwargs): # noqa: E501,D401,D403 + """List all scraper targets. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: Specifies the name of the scraper target. + :param list[str] id: List of scraper target IDs to return. If both `id` and `owner` are specified, only `id` is used. + :param str org_id: Specifies the organization ID of the scraper target. + :param str org: Specifies the organization name of the scraper target. + :return: ScraperTargetResponses + If the method is called asynchronously, + returns the request thread. 
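+
+ Example (sketch; every filter is optional and the values shown are placeholders):
+
+ >>> targets = await api.get_scrapers_async(org='<org name>', name='<scraper name>')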
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponses', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scrapers_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'name', 'id', 'org_id', 'org'] # noqa: E501 + self._check_operation_params('get_scrapers', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'id' in local_var_params: + query_params.append(('id', local_var_params['id'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scrapers_id(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scrapers_id_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scrapers_id_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + return data + + def get_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_with_http_info(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scrapers_id_async(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scrapers_id_prepare(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_scrapers_id', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `get_scrapers_id`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scrapers_id_labels(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_labels(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scrapers_id_labels_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scrapers_id_labels_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + return data + + def get_scrapers_id_labels_with_http_info(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_labels_with_http_info(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_labels_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scrapers_id_labels_async(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
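+
+        Example (an illustrative sketch, not generator output; the ID below is
+        a placeholder):
+
+        >>> # inside an ``async def`` coroutine:
+        >>> labels = await api.get_scrapers_id_labels_async('<scraper-target-id>')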
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_labels_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scrapers_id_labels_prepare(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_scrapers_id_labels', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `get_scrapers_id_labels`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scrapers_id_members(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_members(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scrapers_id_members_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scrapers_id_members_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + return data + + def get_scrapers_id_members_with_http_info(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_members_with_http_info(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_members_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scrapers_id_members_async(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_members_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scrapers_id_members_prepare(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_scrapers_id_members', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `get_scrapers_id_members`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_scrapers_id_owners(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a scraper target. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_owners(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_scrapers_id_owners_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + else: + (data) = self.get_scrapers_id_owners_with_http_info(scraper_target_id, **kwargs) # noqa: E501 + return data + + def get_scrapers_id_owners_with_http_info(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_scrapers_id_owners_with_http_info(scraper_target_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_owners_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_scrapers_id_owners_async(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
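+
+        Example (an illustrative sketch, not generator output; the ID below is
+        a placeholder):
+
+        >>> # inside an ``async def`` coroutine:
+        >>> owners = await api.get_scrapers_id_owners_async('<scraper-target-id>')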
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_scrapers_id_owners_prepare(scraper_target_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_scrapers_id_owners_prepare(self, scraper_target_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_scrapers_id_owners', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `get_scrapers_id_owners`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_scrapers_id(self, scraper_target_id, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Update a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_scrapers_id(scraper_target_id, scraper_target_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param ScraperTargetRequest scraper_target_request: Scraper target update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_scrapers_id_with_http_info(scraper_target_id, scraper_target_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_scrapers_id_with_http_info(scraper_target_id, scraper_target_request, **kwargs) # noqa: E501 + return data + + def patch_scrapers_id_with_http_info(self, scraper_target_id, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Update a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_scrapers_id_with_http_info(scraper_target_id, scraper_target_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. 
(required) + :param ScraperTargetRequest scraper_target_request: Scraper target update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_scrapers_id_prepare(scraper_target_id, scraper_target_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_scrapers_id_async(self, scraper_target_id, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Update a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The identifier of the scraper target. (required) + :param ScraperTargetRequest scraper_target_request: Scraper target update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_scrapers_id_prepare(scraper_target_id, scraper_target_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_scrapers_id_prepare(self, scraper_target_id, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'scraper_target_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_scrapers_id', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `patch_scrapers_id`") # noqa: E501 + # verify the required parameter 'scraper_target_request' is set + if ('scraper_target_request' not in local_var_params or + local_var_params['scraper_target_request'] is None): + raise ValueError("Missing the required parameter `scraper_target_request` when calling `patch_scrapers_id`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in 
local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'scraper_target_request' in local_var_params: + body_params = local_var_params['scraper_target_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scrapers(self, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Create a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers(scraper_target_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ScraperTargetRequest scraper_target_request: Scraper target to create (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scrapers_with_http_info(scraper_target_request, **kwargs) # noqa: E501 + else: + (data) = self.post_scrapers_with_http_info(scraper_target_request, **kwargs) # noqa: E501 + return data + + def post_scrapers_with_http_info(self, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Create a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_with_http_info(scraper_target_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param ScraperTargetRequest scraper_target_request: Scraper target to create (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_prepare(scraper_target_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scrapers_async(self, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + """Create a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param ScraperTargetRequest scraper_target_request: Scraper target to create (required) + :param str zap_trace_span: OpenTracing span context + :return: ScraperTargetResponse + If the method is called asynchronously, + returns the request thread. 
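+
+        Example (an illustrative sketch, not generator output; it assumes
+        ScraperTargetRequest is importable from influxdb_client.domain, and
+        every field value below is a placeholder):
+
+        >>> from influxdb_client.domain import ScraperTargetRequest
+        >>> request = ScraperTargetRequest(name='example-target',
+        ...                                type='prometheus',
+        ...                                url='http://localhost:9999/metrics',
+        ...                                org_id='<org-id>',
+        ...                                bucket_id='<bucket-id>')
+        >>> # inside an ``async def`` coroutine:
+        >>> created = await api.post_scrapers_async(request)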
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_prepare(scraper_target_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ScraperTargetResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scrapers_prepare(self, scraper_target_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_scrapers', all_params, local_var_params) + # verify the required parameter 'scraper_target_request' is set + if ('scraper_target_request' not in local_var_params or + local_var_params['scraper_target_request'] is None): + raise ValueError("Missing the required parameter `scraper_target_request` when calling `post_scrapers`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'scraper_target_request' in local_var_params: + body_params = local_var_params['scraper_target_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scrapers_id_labels(self, scraper_target_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_labels(scraper_target_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scrapers_id_labels_with_http_info(scraper_target_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_scrapers_id_labels_with_http_info(scraper_target_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_scrapers_id_labels_with_http_info(self, scraper_target_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a scraper target. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_labels_with_http_info(scraper_target_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_labels_prepare(scraper_target_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scrapers_id_labels_async(self, scraper_target_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
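+
+        Example (an illustrative sketch, not generator output; it assumes
+        LabelMapping is importable from influxdb_client.domain, and both IDs
+        are placeholders):
+
+        >>> from influxdb_client.domain import LabelMapping
+        >>> mapping = LabelMapping(label_id='<label-id>')
+        >>> # inside an ``async def`` coroutine:
+        >>> label = await api.post_scrapers_id_labels_async('<scraper-target-id>', mapping)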
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_labels_prepare(scraper_target_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scrapers_id_labels_prepare(self, scraper_target_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_scrapers_id_labels', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `post_scrapers_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_scrapers_id_labels`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scrapers_id_members(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_members(scraper_target_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scrapers_id_members_with_http_info(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_scrapers_id_members_with_http_info(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_scrapers_id_members_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_members_with_http_info(scraper_target_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_members_prepare(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scrapers_id_members_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_members_prepare(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scrapers_id_members_prepare(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_scrapers_id_members', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `post_scrapers_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_scrapers_id_members`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_scrapers_id_owners(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_owners(scraper_target_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_scrapers_id_owners_with_http_info(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_scrapers_id_owners_with_http_info(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_scrapers_id_owners_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a scraper target. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_scrapers_id_owners_with_http_info(scraper_target_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_owners_prepare(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_scrapers_id_owners_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a scraper target. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str scraper_target_id: The scraper target ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_scrapers_id_owners_prepare(scraper_target_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/scrapers/{scraperTargetID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_scrapers_id_owners_prepare(self, scraper_target_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['scraper_target_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_scrapers_id_owners', all_params, local_var_params) + # verify the required parameter 'scraper_target_id' is set + if ('scraper_target_id' not in local_var_params or + local_var_params['scraper_target_id'] is None): + raise ValueError("Missing the required parameter `scraper_target_id` when calling `post_scrapers_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_scrapers_id_owners`") # noqa: E501 + + path_params = {} + if 'scraper_target_id' in local_var_params: + path_params['scraperTargetID'] = local_var_params['scraper_target_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/secrets_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/secrets_service.py new file mode 100644 index 0000000..495ac95 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/secrets_service.py @@ -0,0 +1,529 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class SecretsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. 
+ + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """SecretsService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_orgs_id_secrets_id(self, org_id, secret_id, **kwargs): # noqa: E501,D401,D403 + """Delete a secret from an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_secrets_id(org_id, secret_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str secret_id: The secret ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_orgs_id_secrets_id_with_http_info(org_id, secret_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_orgs_id_secrets_id_with_http_info(org_id, secret_id, **kwargs) # noqa: E501 + return data + + def delete_orgs_id_secrets_id_with_http_info(self, org_id, secret_id, **kwargs): # noqa: E501,D401,D403 + """Delete a secret from an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_orgs_id_secrets_id_with_http_info(org_id, secret_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str secret_id: The secret ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_secrets_id_prepare(org_id, secret_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets/{secretID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_orgs_id_secrets_id_async(self, org_id, secret_id, **kwargs): # noqa: E501,D401,D403 + """Delete a secret from an organization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str secret_id: The secret ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread.
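+
+        Example (an illustrative sketch, not generator output; both IDs are
+        placeholders):
+
+        >>> # inside an ``async def`` coroutine:
+        >>> await api.delete_orgs_id_secrets_id_async('<org-id>', '<secret-id>')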
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_orgs_id_secrets_id_prepare(org_id, secret_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets/{secretID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_orgs_id_secrets_id_prepare(self, org_id, secret_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'secret_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_orgs_id_secrets_id', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `delete_orgs_id_secrets_id`") # noqa: E501 + # verify the required parameter 'secret_id' is set + if ('secret_id' not in local_var_params or + local_var_params['secret_id'] is None): + raise ValueError("Missing the required parameter `secret_id` when calling `delete_orgs_id_secrets_id`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + if 'secret_id' in local_var_params: + path_params['secretID'] = local_var_params['secret_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_orgs_id_secrets(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all secret keys for an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_secrets(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: SecretKeysResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_orgs_id_secrets_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.get_orgs_id_secrets_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def get_orgs_id_secrets_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all secret keys for an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_orgs_id_secrets_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: SecretKeysResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_secrets_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='SecretKeysResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_orgs_id_secrets_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List all secret keys for an organization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: SecretKeysResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_orgs_id_secrets_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='SecretKeysResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_orgs_id_secrets_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_orgs_id_secrets', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `get_orgs_id_secrets`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_orgs_id_secrets(self, org_id, request_body, **kwargs): # noqa: E501,D401,D403 + """Update secrets in an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_orgs_id_secrets(org_id, request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. 
(required) + :param dict(str, str) request_body: Secret key value pairs to update/add (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_orgs_id_secrets_with_http_info(org_id, request_body, **kwargs) # noqa: E501 + else: + (data) = self.patch_orgs_id_secrets_with_http_info(org_id, request_body, **kwargs) # noqa: E501 + return data + + def patch_orgs_id_secrets_with_http_info(self, org_id, request_body, **kwargs): # noqa: E501,D401,D403 + """Update secrets in an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_orgs_id_secrets_with_http_info(org_id, request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param dict(str, str) request_body: Secret key value pairs to update/add (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_orgs_id_secrets_prepare(org_id, request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_orgs_id_secrets_async(self, org_id, request_body, **kwargs): # noqa: E501,D401,D403 + """Update secrets in an organization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param dict(str, str) request_body: Secret key value pairs to update/add (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
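+
+        Example (an illustrative sketch, not generator output; the ID and the
+        key/value pair are placeholders):
+
+        >>> # inside an ``async def`` coroutine:
+        >>> await api.patch_orgs_id_secrets_async('<org-id>', {'apikey': '<value>'})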
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_orgs_id_secrets_prepare(org_id, request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_orgs_id_secrets_prepare(self, org_id, request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_orgs_id_secrets', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `patch_orgs_id_secrets`") # noqa: E501 + # verify the required parameter 'request_body' is set + if ('request_body' not in local_var_params or + local_var_params['request_body'] is None): + raise ValueError("Missing the required parameter `request_body` when calling `patch_orgs_id_secrets`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'request_body' in local_var_params: + body_params = local_var_params['request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_orgs_id_secrets(self, org_id, secret_keys, **kwargs): # noqa: E501,D401,D403 + """Delete secrets from an organization. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_secrets(org_id, secret_keys, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param SecretKeys secret_keys: Secret key to delete (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_orgs_id_secrets_with_http_info(org_id, secret_keys, **kwargs) # noqa: E501 + else: + (data) = self.post_orgs_id_secrets_with_http_info(org_id, secret_keys, **kwargs) # noqa: E501 + return data + + def post_orgs_id_secrets_with_http_info(self, org_id, secret_keys, **kwargs): # noqa: E501,D401,D403 + """Delete secrets from an organization. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_orgs_id_secrets_with_http_info(org_id, secret_keys, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: The organization ID. (required) + :param SecretKeys secret_keys: Secret key to delete (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_secrets_prepare(org_id, secret_keys, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets/delete', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_orgs_id_secrets_async(self, org_id, secret_keys, **kwargs): # noqa: E501,D401,D403 + """Delete secrets from an organization. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: The organization ID. (required) + :param SecretKeys secret_keys: Secret key to delete (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_orgs_id_secrets_prepare(org_id, secret_keys, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/orgs/{orgID}/secrets/delete', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_orgs_id_secrets_prepare(self, org_id, secret_keys, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'secret_keys', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_orgs_id_secrets', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `post_orgs_id_secrets`") # noqa: E501 + # verify the required parameter 'secret_keys' is set + if ('secret_keys' not in local_var_params or + local_var_params['secret_keys'] is None): + raise ValueError("Missing the required parameter `secret_keys` when calling `post_orgs_id_secrets`") # noqa: E501 + + path_params = {} + if 'org_id' in local_var_params: + path_params['orgID'] = local_var_params['org_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + 
body_params = None + if 'secret_keys' in local_var_params: + body_params = local_var_params['secret_keys'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/setup_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/setup_service.py new file mode 100644 index 0000000..ed12bde --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/setup_service.py @@ -0,0 +1,263 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class SetupService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """SetupService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_setup(self, **kwargs): # noqa: E501,D401,D403 + """Check if database has default user, org, bucket. + + Returns `true` if no default user, organization, or bucket has been created. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_setup(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: IsOnboarding + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_setup_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_setup_with_http_info(**kwargs) # noqa: E501 + return data + + def get_setup_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Check if database has default user, org, bucket. + + Returns `true` if no default user, organization, or bucket has been created. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_setup_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: IsOnboarding + If the method is called asynchronously, + returns the request thread.
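+ + Illustrative example (editorial sketch, not generator output; assumes a configured api client): + >>> onboarding = api.get_setup() + >>> onboarding.allowed # True while initial setup is still required, per the IsOnboarding model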
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_setup_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/setup', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='IsOnboarding', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_setup_async(self, **kwargs): # noqa: E501,D401,D403 + """Check if database has default user, org, bucket. + + Returns `true` if no default user, organization, or bucket has been created. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: IsOnboarding + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_setup_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/setup', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='IsOnboarding', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_setup_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_setup', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_setup(self, onboarding_request, **kwargs): # noqa: E501,D401,D403 + """Set up initial user, org and bucket. + + Post an onboarding request to set up initial user, org and bucket. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_setup(onboarding_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param OnboardingRequest onboarding_request: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: OnboardingResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_setup_with_http_info(onboarding_request, **kwargs) # noqa: E501 + else: + (data) = self.post_setup_with_http_info(onboarding_request, **kwargs) # noqa: E501 + return data + + def post_setup_with_http_info(self, onboarding_request, **kwargs): # noqa: E501,D401,D403 + """Set up initial user, org and bucket. + + Post an onboarding request to set up initial user, org and bucket. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_setup_with_http_info(onboarding_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param OnboardingRequest onboarding_request: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: OnboardingResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_setup_prepare(onboarding_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/setup', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='OnboardingResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_setup_async(self, onboarding_request, **kwargs): # noqa: E501,D401,D403 + """Set up initial user, org and bucket. + + Post an onboarding request to set up initial user, org and bucket. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param OnboardingRequest onboarding_request: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: OnboardingResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_setup_prepare(onboarding_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/setup', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='OnboardingResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_setup_prepare(self, onboarding_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['onboarding_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_setup', all_params, local_var_params) + # verify the required parameter 'onboarding_request' is set + if ('onboarding_request' not in local_var_params or + local_var_params['onboarding_request'] is None): + raise ValueError("Missing the required parameter `onboarding_request` when calling `post_setup`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'onboarding_request' in local_var_params: + body_params = local_var_params['onboarding_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/signin_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/signin_service.py new file mode 100644 index 0000000..f3ca698 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/signin_service.py @@ -0,0 +1,145 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class SigninService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """SigninService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def post_signin(self, **kwargs): # noqa: E501,D401,D403 + """Create a user session.. + + Authenticates [Basic authentication credentials](#section/Authentication/BasicAuthentication) for a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user), and then, if successful, generates a user session. To authenticate a user, pass the HTTP `Authorization` header with the `Basic` scheme and the base64-encoded username and password. 
For syntax and more information, see [Basic Authentication](#section/Authentication/BasicAuthentication). If authentication is successful, InfluxDB creates a new session for the user and then returns the session cookie in the `Set-Cookie` response header. InfluxDB stores user sessions in memory only. They expire within ten minutes and during restarts of the InfluxDB instance. #### User sessions with authorizations - In InfluxDB Cloud, a user session inherits all the user's permissions for the organization. - In InfluxDB OSS, a user session inherits all the user's permissions for all the organizations that the user belongs to. #### Related endpoints - [Signout](#tag/Signout) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_signin(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_signin_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.post_signin_with_http_info(**kwargs) # noqa: E501 + return data + + def post_signin_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Create a user session. + + Authenticates [Basic authentication credentials](#section/Authentication/BasicAuthentication) for a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user), and then, if successful, generates a user session. To authenticate a user, pass the HTTP `Authorization` header with the `Basic` scheme and the base64-encoded username and password. For syntax and more information, see [Basic Authentication](#section/Authentication/BasicAuthentication). If authentication is successful, InfluxDB creates a new session for the user and then returns the session cookie in the `Set-Cookie` response header. InfluxDB stores user sessions in memory only. They expire within ten minutes and during restarts of the InfluxDB instance. #### User sessions with authorizations - In InfluxDB Cloud, a user session inherits all the user's permissions for the organization. - In InfluxDB OSS, a user session inherits all the user's permissions for all the organizations that the user belongs to. #### Related endpoints - [Signout](#tag/Signout) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_signin_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread.
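+ + Illustrative example (editorial sketch, not generator output; the credentials shown are placeholders): + >>> import base64 + >>> creds = base64.b64encode(b'my-user:my-password').decode() + >>> header = 'Basic {}'.format(creds) + >>> api.post_signin(authorization=header) # session cookie is returned via Set-Cookie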
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_signin_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/signin', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_signin_async(self, **kwargs): # noqa: E501,D401,D403 + """Create a user session.. + + Authenticates [Basic authentication credentials](#section/Authentication/BasicAuthentication) for a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user), and then, if successful, generates a user session. To authenticate a user, pass the HTTP `Authorization` header with the `Basic` scheme and the base64-encoded username and password. For syntax and more information, see [Basic Authentication](#section/Authentication/BasicAuthentication) for syntax and more information. If authentication is successful, InfluxDB creates a new session for the user and then returns the session cookie in the `Set-Cookie` response header. InfluxDB stores user sessions in memory only. They expire within ten minutes and during restarts of the InfluxDB instance. #### User sessions with authorizations - In InfluxDB Cloud, a user session inherits all the user's permissions for the organization. - In InfluxDB OSS, a user session inherits all the user's permissions for all the organizations that the user belongs to. #### Related endpoints - [Signout](#tag/Signout) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_signin_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/signin', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_signin_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'authorization'] # noqa: E501 + self._check_operation_params('post_signin', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'authorization' in local_var_params: + header_params['Authorization'] = local_var_params['authorization'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/signout_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/signout_service.py new file mode 100644 index 0000000..882e787 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/signout_service.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class SignoutService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """SignoutService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def post_signout(self, **kwargs): # noqa: E501,D401,D403 + """Expire a user session. + + Expires a user session specified by a session cookie. Use this endpoint to expire a user session that was generated when the user authenticated with the InfluxDB Developer Console (UI) or the `POST /api/v2/signin` endpoint. For example, the `POST /api/v2/signout` endpoint represents the third step in the following three-step process to authenticate a user, retrieve the `user` resource, and then expire the session: 1. Send a request with the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. Send a request to the `GET /api/v2/me` endpoint, passing the stored session cookie from step 1 to retrieve user information. 3. 
Send a request to the `POST /api/v2/signout` endpoint, passing the stored session cookie to expire the session. _See the complete example in request samples._ InfluxDB stores user sessions in memory only. If a user doesn't sign out, then the user session automatically expires within ten minutes or during a restart of the InfluxDB instance. To learn more about cookies in HTTP requests, see [Mozilla Developer Network (MDN) Web Docs, HTTP cookies](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies). #### Related endpoints - [Signin](#tag/Signin) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_signout(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_signout_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.post_signout_with_http_info(**kwargs) # noqa: E501 + return data + + def post_signout_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Expire a user session. + + Expires a user session specified by a session cookie. Use this endpoint to expire a user session that was generated when the user authenticated with the InfluxDB Developer Console (UI) or the `POST /api/v2/signin` endpoint. For example, the `POST /api/v2/signout` endpoint represents the third step in the following three-step process to authenticate a user, retrieve the `user` resource, and then expire the session: 1. Send a request with the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. Send a request to the `GET /api/v2/me` endpoint, passing the stored session cookie from step 1 to retrieve user information. 3. Send a request to the `POST /api/v2/signout` endpoint, passing the stored session cookie to expire the session. _See the complete example in request samples._ InfluxDB stores user sessions in memory only. If a user doesn't sign out, then the user session automatically expires within ten minutes or during a restart of the InfluxDB instance. To learn more about cookies in HTTP requests, see [Mozilla Developer Network (MDN) Web Docs, HTTP cookies](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies). #### Related endpoints - [Signin](#tag/Signin) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_signout_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
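+ + Illustrative example (editorial sketch, not generator output; assumes the session cookie from a prior signin is retained and resent by the configured HTTP client): + >>> api.post_signout() # expires the session identified by the stored cookie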
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_signout_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/signout', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_signout_async(self, **kwargs): # noqa: E501,D401,D403 + """Expire a user session. + + Expires a user session specified by a session cookie. Use this endpoint to expire a user session that was generated when the user authenticated with the InfluxDB Developer Console (UI) or the `POST /api/v2/signin` endpoint. For example, the `POST /api/v2/signout` endpoint represents the third step in the following three-step process to authenticate a user, retrieve the `user` resource, and then expire the session: 1. Send a request with the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. Send a request to the `GET /api/v2/me` endpoint, passing the stored session cookie from step 1 to retrieve user information. 3. Send a request to the `POST /api/v2/signout` endpoint, passing the stored session cookie to expire the session. _See the complete example in request samples._ InfluxDB stores user sessions in memory only. If a user doesn't sign out, then the user session automatically expires within ten minutes or during a restart of the InfluxDB instance. To learn more about cookies in HTTP requests, see [Mozilla Developer Network (MDN) Web Docs, HTTP cookies](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies). #### Related endpoints - [Signin](#tag/Signin) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_signout_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/signout', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_signout_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('post_signout', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/sources_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/sources_service.py new file mode 100644 index 0000000..5bb381f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/sources_service.py @@ -0,0 +1,860 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class SourcesService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """SourcesService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_sources_id(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Delete a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_sources_id(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_sources_id_with_http_info(source_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_sources_id_with_http_info(source_id, **kwargs) # noqa: E501 + return data + + def delete_sources_id_with_http_info(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Delete a source. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_sources_id_with_http_info(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_sources_id_prepare(source_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_sources_id_async(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Delete a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_sources_id_prepare(source_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_sources_id_prepare(self, source_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_sources_id', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `delete_sources_id`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_sources(self, **kwargs): # noqa: E501,D401,D403 + """List all sources. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Sources + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_sources_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_sources_with_http_info(**kwargs) # noqa: E501 + return data + + def get_sources_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all sources. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Sources + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Sources', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_sources_async(self, **kwargs): # noqa: E501,D401,D403 + """List all sources. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Sources + If the method is called asynchronously, + returns the request thread. 
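+ + Illustrative example (editorial sketch, not generator output; 'my-org' is a placeholder): + >>> # inside a coroutine, with an asyncio-enabled api client: + >>> sources = await api.get_sources_async(org='my-org')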
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Sources', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_sources_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'org'] # noqa: E501 + self._check_operation_params('get_sources', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_sources_id(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_sources_id_with_http_info(source_id, **kwargs) # noqa: E501 + else: + (data) = self.get_sources_id_with_http_info(source_id, **kwargs) # noqa: E501 + return data + + def get_sources_id_with_http_info(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_with_http_info(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_prepare(source_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_sources_id_async(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_prepare(source_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_sources_id_prepare(self, source_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_sources_id', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `get_sources_id`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_sources_id_buckets(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_buckets(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Buckets + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_sources_id_buckets_with_http_info(source_id, **kwargs) # noqa: E501 + else: + (data) = self.get_sources_id_buckets_with_http_info(source_id, **kwargs) # noqa: E501 + return data + + def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_buckets_with_http_info(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Buckets + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_buckets_prepare(source_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_sources_id_buckets_async(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get buckets in a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :return: Buckets + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_buckets_prepare(source_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}/buckets', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Buckets', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_sources_id_buckets_prepare(self, source_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'zap_trace_span', 'org'] # noqa: E501 + self._check_operation_params('get_sources_id_buckets', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `get_sources_id_buckets`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_sources_id_health(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get the health of a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_health(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_sources_id_health_with_http_info(source_id, **kwargs) # noqa: E501 + else: + (data) = self.get_sources_id_health_with_http_info(source_id, **kwargs) # noqa: E501 + return data + + def get_sources_id_health_with_http_info(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get the health of a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_sources_id_health_with_http_info(source_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_health_prepare(source_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}/health', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='HealthCheck', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_sources_id_health_async(self, source_id, **kwargs): # noqa: E501,D401,D403 + """Get the health of a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: HealthCheck + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_sources_id_health_prepare(source_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}/health', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='HealthCheck', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_sources_id_health_prepare(self, source_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_sources_id_health', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `get_sources_id_health`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_sources_id(self, source_id, source, **kwargs): # noqa: E501,D401,D403 + """Update a Source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_sources_id(source_id, source, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. 
(required) + :param Source source: Source update (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_sources_id_with_http_info(source_id, source, **kwargs) # noqa: E501 + else: + (data) = self.patch_sources_id_with_http_info(source_id, source, **kwargs) # noqa: E501 + return data + + def patch_sources_id_with_http_info(self, source_id, source, **kwargs): # noqa: E501,D401,D403 + """Update a Source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_sources_id_with_http_info(source_id, source, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str source_id: The source ID. (required) + :param Source source: Source update (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_sources_id_prepare(source_id, source, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_sources_id_async(self, source_id, source, **kwargs): # noqa: E501,D401,D403 + """Update a Source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str source_id: The source ID. (required) + :param Source source: Source update (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. 
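+
+ Example (a minimal sketch; must be awaited from inside a coroutine and assumes a configured async `api_client` plus a prepared `Source` model instance, here the hypothetical `source_update`):
+
+ >>> updated = await api.patch_sources_id_async(source_id, source_update)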
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_sources_id_prepare(source_id, source, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources/{sourceID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_sources_id_prepare(self, source_id, source, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source_id', 'source', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_sources_id', all_params, local_var_params) + # verify the required parameter 'source_id' is set + if ('source_id' not in local_var_params or + local_var_params['source_id'] is None): + raise ValueError("Missing the required parameter `source_id` when calling `patch_sources_id`") # noqa: E501 + # verify the required parameter 'source' is set + if ('source' not in local_var_params or + local_var_params['source'] is None): + raise ValueError("Missing the required parameter `source` when calling `patch_sources_id`") # noqa: E501 + + path_params = {} + if 'source_id' in local_var_params: + path_params['sourceID'] = local_var_params['source_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'source' in local_var_params: + body_params = local_var_params['source'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_sources(self, source, **kwargs): # noqa: E501,D401,D403 + """Create a source. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_sources(source, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param Source source: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_sources_with_http_info(source, **kwargs) # noqa: E501 + else: + (data) = self.post_sources_with_http_info(source, **kwargs) # noqa: E501 + return data + + def post_sources_with_http_info(self, source, **kwargs): # noqa: E501,D401,D403 + """Create a source. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_sources_with_http_info(source, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param Source source: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_sources_prepare(source, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/sources', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_sources_async(self, source, **kwargs): # noqa: E501,D401,D403 + """Create a source. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param Source source: Source to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Source + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_sources_prepare(source, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/sources', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Source', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_sources_prepare(self, source, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['source', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_sources', all_params, local_var_params) + # verify the required parameter 'source' is set + if ('source' not in local_var_params or + local_var_params['source'] is None): + raise ValueError("Missing the required parameter `source` when calling `post_sources`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'source' in local_var_params: + body_params = local_var_params['source'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/tasks_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/tasks_service.py new file mode 100644 index 0000000..9142951 --- 
/dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/tasks_service.py @@ -0,0 +1,2725 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class TasksService(_BaseService): + """NOTE: This class is auto-generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """TasksService - a service defined in OpenAPI.""" + super().__init__(api_client) + + def delete_tasks_id(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Delete a task. + + Deletes a task and associated records. Use this endpoint to delete a task and all associated records (task runs, logs, and labels). Once the task is deleted, InfluxDB cancels all scheduled runs of the task. If you want to disable a task instead of deleting it, [update the task status to `inactive`](#operation/PatchTasksID). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def delete_tasks_id_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Delete a task. + + Deletes a task and associated records. Use this endpoint to delete a task and all associated records (task runs, logs, and labels). Once the task is deleted, InfluxDB cancels all scheduled runs of the task. If you want to disable a task instead of deleting it, [update the task status to `inactive`](#operation/PatchTasksID). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread.
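+
+ Example (a minimal sketch; assumes a configured `api_client` and a hypothetical task ID; `zap_trace_span` is forwarded as the `Zap-Trace-Span` request header, as the prepare step below shows):
+
+ >>> api.delete_tasks_id_with_http_info('09a776832f381000', zap_trace_span='span-context')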
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_tasks_id_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Delete a task. + + Deletes a task and associated records. Use this endpoint to delete a task and all associated records (task runs, logs, and labels). Once the task is deleted, InfluxDB cancels all scheduled runs of the task. If you want to disable a task instead of delete it, [update the task status to `inactive`](#operation/PatchTasksID). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_tasks_id_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_tasks_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_tasks_id_labels_id(self, task_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a task. + + Deletes a label from a task. + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_labels_id(task_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to delete the label from. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_tasks_id_labels_id_with_http_info(task_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_tasks_id_labels_id_with_http_info(task_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_tasks_id_labels_id_with_http_info(self, task_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a task. + + Deletes a label from a task. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_labels_id_with_http_info(task_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to delete the label from. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_labels_id_prepare(task_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_tasks_id_labels_id_async(self, task_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a task. + + Deletes a label from a task. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to delete the label from. (required) + :param str label_id: The ID of the label to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
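+
+ Example (a minimal sketch; assumes a configured async `api_client` and hypothetical IDs; from synchronous code the coroutine can be driven with `asyncio.run`):
+
+ >>> import asyncio
+ >>> asyncio.run(api.delete_tasks_id_labels_id_async(task_id, label_id))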
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_labels_id_prepare(task_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_tasks_id_labels_id_prepare(self, task_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_tasks_id_labels_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_tasks_id_labels_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_tasks_id_members_id(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Removes a member from a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_members_id(user_id, task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_tasks_id_members_id_with_http_info(user_id, task_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_tasks_id_members_id_with_http_info(user_id, task_id, **kwargs) # noqa: E501 + return data + + def delete_tasks_id_members_id_with_http_info(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a task. 
+ + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Removes a member from a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_members_id_with_http_info(user_id, task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_members_id_prepare(user_id, task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_tasks_id_members_id_async(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Removes a member from a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_members_id_prepare(user_id, task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_tasks_id_members_id_prepare(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_tasks_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_tasks_id_members_id`") # noqa: E501 + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_tasks_id_owners_id(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_owners_id(user_id, task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_tasks_id_owners_id_with_http_info(user_id, task_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_tasks_id_owners_id_with_http_info(user_id, task_id, **kwargs) # noqa: E501 + return data + + def delete_tasks_id_owners_id_with_http_info(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. 
Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_owners_id_with_http_info(user_id, task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_owners_id_prepare(user_id, task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_tasks_id_owners_id_async(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_owners_id_prepare(user_id, task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_tasks_id_owners_id_prepare(self, user_id, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_tasks_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_tasks_id_owners_id`") # noqa: E501 + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_tasks_id_runs_id(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Cancel a running task. + + Cancels a running [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint with InfluxDB OSS to cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_runs_id(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to cancel. (required) + :param str run_id: The ID of the task run to cancel. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + return data + + def delete_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Cancel a running task. 
+ + Cancels a running [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint with InfluxDB OSS to cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_tasks_id_runs_id_with_http_info(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to cancel. (required) + :param str run_id: The ID of the task run to cancel. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_runs_id_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Cancel a running task. + + Cancels a running [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint with InfluxDB OSS to cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to cancel. (required) + :param str run_id: The ID of the task run to cancel. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
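+
+ Example (a minimal sketch; must be awaited from inside a coroutine and assumes a configured async `api_client`; the call resolves to `None` on success, since no response body is deserialized):
+
+ >>> await api.delete_tasks_id_runs_id_async(task_id, run_id)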
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_tasks_id_runs_id_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_tasks_id_runs_id_prepare(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'run_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_tasks_id_runs_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_runs_id`") # noqa: E501 + # verify the required parameter 'run_id' is set + if ('run_id' not in local_var_params or + local_var_params['run_id'] is None): + raise ValueError("Missing the required parameter `run_id` when calling `delete_tasks_id_runs_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + if 'run_id' in local_var_params: + path_params['runID'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks(self, **kwargs): # noqa: E501,D401,D403 + """List tasks. + + Retrieves a list of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). To limit which tasks are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all tasks up to the default `limit`. #### Related guide - [Process data with InfluxDB tasks](https://docs.influxdata.com/influxdb/latest/process-data/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) name. Only returns tasks with the specified name. Different tasks may have the same name. + :param str after: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Only returns tasks created after the specified task. + :param str user: A [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) ID. Only returns tasks owned by the specified user. + :param str org: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) name. 
Only returns tasks owned by the specified organization. + :param str org_id: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) ID. Only returns tasks owned by the specified organization. + :param str status: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) status. Only returns tasks that have the specified status (`active` or `inactive`). + :param int limit: The maximum number of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) to return. Default is `100`. The minimum is `1` and the maximum is `500`. To reduce the payload size, combine _`type=basic`_ and _`limit`_ (see _Request samples_). For more information about the `basic` response, see the _`type`_ parameter. + :param str type: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) type (`basic` or `system`). Default is `system`. Specifies the level of detail for tasks in the response. The default (`system`) response contains all the metadata properties for tasks. To reduce the response size, pass `basic` to omit some task properties (`flux`, `createdAt`, `updatedAt`). + :return: Tasks + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_tasks_with_http_info(**kwargs) # noqa: E501 + return data + + def get_tasks_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List tasks. + + Retrieves a list of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). To limit which tasks are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all tasks up to the default `limit`. #### Related guide - [Process data with InfluxDB tasks](https://docs.influxdata.com/influxdb/latest/process-data/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) name. Only returns tasks with the specified name. Different tasks may have the same name. + :param str after: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Only returns tasks created after the specified task. + :param str user: A [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) ID. Only returns tasks owned by the specified user. + :param str org: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) name. Only returns tasks owned by the specified organization. + :param str org_id: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) ID. Only returns tasks owned by the specified organization. + :param str status: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) status. Only returns tasks that have the specified status (`active` or `inactive`). + :param int limit: The maximum number of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) to return. Default is `100`. The minimum is `1` and the maximum is `500`. 
To reduce the payload size, combine _`type=basic`_ and _`limit`_ (see _Request samples_). For more information about the `basic` response, see the _`type`_ parameter. + :param str type: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) type (`basic` or `system`). Default is `system`. Specifies the level of detail for tasks in the response. The default (`system`) response contains all the metadata properties for tasks. To reduce the response size, pass `basic` to omit some task properties (`flux`, `createdAt`, `updatedAt`). + :return: Tasks + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Tasks', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_async(self, **kwargs): # noqa: E501,D401,D403 + """List tasks. + + Retrieves a list of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). To limit which tasks are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all tasks up to the default `limit`. #### Related guide - [Process data with InfluxDB tasks](https://docs.influxdata.com/influxdb/latest/process-data/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str name: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) name. Only returns tasks with the specified name. Different tasks may have the same name. + :param str after: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Only returns tasks created after the specified task. + :param str user: A [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) ID. Only returns tasks owned by the specified user. + :param str org: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) name. Only returns tasks owned by the specified organization. + :param str org_id: An [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization) ID. Only returns tasks owned by the specified organization. + :param str status: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) status. Only returns tasks that have the specified status (`active` or `inactive`). + :param int limit: The maximum number of [tasks](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) to return. Default is `100`. The minimum is `1` and the maximum is `500`. To reduce the payload size, combine _`type=basic`_ and _`limit`_ (see _Request samples_). For more information about the `basic` response, see the _`type`_ parameter. + :param str type: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) type (`basic` or `system`). Default is `system`. Specifies the level of detail for tasks in the response. 
The default (`system`) response contains all the metadata properties for tasks. To reduce the response size, pass `basic` to omit some task properties (`flux`, `createdAt`, `updatedAt`). + :return: Tasks + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Tasks', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'name', 'after', 'user', 'org', 'org_id', 'status', 'limit', 'type'] # noqa: E501 + self._check_operation_params('get_tasks', all_params, local_var_params) + + if 'limit' in local_var_params and local_var_params['limit'] > 500: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_tasks`, must be a value less than or equal to `500`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_tasks`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'after' in local_var_params: + query_params.append(('after', local_var_params['after'])) # noqa: E501 + if 'user' in local_var_params: + query_params.append(('user', local_var_params['user'])) # noqa: E501 + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'status' in local_var_params: + query_params.append(('status', local_var_params['status'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'type' in local_var_params: + query_params.append(('type', local_var_params['type'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a task. + + Retrieves a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a task. + + Retrieves a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a task. + + Retrieves a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. 
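+
+ Example (a minimal sketch; must be awaited from inside a coroutine; the attribute access assumes the generated `Task` model exposes `name`):
+
+ >>> task = await api.get_tasks_id_async(task_id)
+ >>> print(task.name)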
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_labels(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List labels for a task. + + Retrieves a list of all labels for a task. Labels may be used for grouping and filtering tasks. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_labels(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_labels_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_labels_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_labels_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List labels for a task. + + Retrieves a list of all labels for a task. Labels may be used for grouping and filtering tasks. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_labels_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_labels_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_labels_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List labels for a task. + + Retrieves a list of all labels for a task. Labels may be used for grouping and filtering tasks. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to retrieve labels for. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_labels_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_labels_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_labels', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_labels`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_logs(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a task. + + Retrieves a list of all logs for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). When an InfluxDB task runs, a “run” record is created in the task’s history. Logs associated with each run provide relevant log messages, timestamps, and the exit status of the run attempt. 
Use this endpoint to retrieve only the log events for a task, without additional task metadata. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_logs(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_logs_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_logs_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_logs_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a task. + + Retrieves a list of all logs for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). When an InfluxDB task runs, a “run” record is created in the task’s history. Logs associated with each run provide relevant log messages, timestamps, and the exit status of the run attempt. Use this endpoint to retrieve only the log events for a task, without additional task metadata. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_logs_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_logs_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/logs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Logs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_logs_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a task. + + Retrieves a list of all logs for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). When an InfluxDB task runs, a “run” record is created in the task’s history. Logs associated with each run provide relevant log messages, timestamps, and the exit status of the run attempt. Use this endpoint to retrieve only the log events for a task, without additional task metadata. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. 
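+
+ Illustrative sketch (assumes this coroutine is awaited from a running
+ event loop; the task ID is a placeholder):
+
+ >>> logs = await api.get_tasks_id_logs_async('09a776832f381000')
+ >>> for event in logs.events:
+ ... print(event.time, event.message)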
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_logs_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/logs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Logs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_logs_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_logs', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_logs`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_members(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all task members. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Lists all users that have the `member` role for the specified [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_members(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_members_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_members_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_members_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all task members. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Lists all users that have the `member` role for the specified [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_members_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_members_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_members_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all task members. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Lists all users that have the `member` role for the specified [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The task ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
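+
+ Sketch only, since this endpoint is deprecated (the ID is a placeholder):
+
+ >>> members = await api.get_tasks_id_members_async('09a776832f381000')
+ >>> [user.id for user in members.users]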
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_members_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_members_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_members', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_members`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_owners(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Retrieves all users that have owner permission for a task. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_owners(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_owners_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_owners_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_owners_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Retrieves all users that have owner permission for a task. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_owners_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve owners for. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_owners_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_owners_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Retrieves all users that have owner permission for a task. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to retrieve owners for. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_owners_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_owners_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_owners', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_owners`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_runs(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List runs for a task. + + Retrieves a list of runs for a [task](https://docs.influxdata.com/influxdb/latest/process-data/). 
To limit which task runs are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all task runs up to the default `limit`. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to get runs for. Only returns runs for this task. (required) + :param str zap_trace_span: OpenTracing span context + :param str after: A task run ID. Only returns runs created after this run. + :param int limit: Limits the number of task runs returned. Default is `100`. + :param datetime after_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled after this time. + :param datetime before_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled before this time. + :return: Runs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_runs_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_runs_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_runs_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List runs for a task. + + Retrieves a list of runs for a [task](https://docs.influxdata.com/influxdb/latest/process-data/). To limit which task runs are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all task runs up to the default `limit`. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to get runs for. Only returns runs for this task. (required) + :param str zap_trace_span: OpenTracing span context + :param str after: A task run ID. Only returns runs created after this run. + :param int limit: Limits the number of task runs returned. Default is `100`. + :param datetime after_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled after this time. + :param datetime before_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled before this time. + :return: Runs + If the method is called asynchronously, + returns the request thread. 
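+
+ Hypothetical example of limiting the result set (the task ID and the
+ `limit` value are placeholders):
+
+ >>> runs = api.get_tasks_id_runs('09a776832f381000', limit=10)
+ >>> for run in runs.runs:
+ ... print(run.id, run.status)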
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Runs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_runs_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """List runs for a task. + + Retrieves a list of runs for a [task](https://docs.influxdata.com/influxdb/latest/process-data/). To limit which task runs are returned, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all task runs up to the default `limit`. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to get runs for. Only returns runs for this task. (required) + :param str zap_trace_span: OpenTracing span context + :param str after: A task run ID. Only returns runs created after this run. + :param int limit: Limits the number of task runs returned. Default is `100`. + :param datetime after_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled after this time. + :param datetime before_time: A timestamp ([RFC3339 date/time format](https://docs.influxdata.com/influxdb/latest/reference/glossary/#rfc3339-timestamp)). Only returns runs scheduled before this time. + :return: Runs + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Runs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_runs_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span', 'after', 'limit', 'after_time', 'before_time'] # noqa: E501 + self._check_operation_params('get_tasks_id_runs', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs`") # noqa: E501 + + if 'limit' in local_var_params and local_var_params['limit'] > 500: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_tasks_id_runs`, must be a value less than or equal to `500`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_tasks_id_runs`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + if 'after' in local_var_params: + query_params.append(('after', local_var_params['after'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'after_time' in local_var_params: + query_params.append(('afterTime', local_var_params['after_time'])) # noqa: E501 + if 'before_time' in local_var_params: + query_params.append(('beforeTime', local_var_params['before_time'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_runs_id(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a run for a task.. + + Retrieves a specific run for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint to retrieve detail and logs for a specific task run. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs_id(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve runs for. (required) + :param str run_id: The ID of the run to retrieve. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: Run + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a run for a task. + + Retrieves a specific run for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint to retrieve detail and logs for a specific task run. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs_id_with_http_info(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to retrieve runs for. (required) + :param str run_id: The ID of the run to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Run + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_id_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a run for a task. + + Retrieves a specific run for a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task). Use this endpoint to retrieve detail and logs for a specific task run. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to retrieve runs for. (required) + :param str run_id: The ID of the run to retrieve. (required) + :param str zap_trace_span: OpenTracing span context + :return: Run + If the method is called asynchronously, + returns the request thread.
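+
+ Illustrative sketch (both IDs are placeholders):
+
+ >>> run = await api.get_tasks_id_runs_id_async('09a776832f381000', '09b070dadaa7d000')
+ >>> print(run.started_at, run.finished_at, run.status)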
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_id_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_runs_id_prepare(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'run_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_runs_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs_id`") # noqa: E501 + # verify the required parameter 'run_id' is set + if ('run_id' not in local_var_params or + local_var_params['run_id'] is None): + raise ValueError("Missing the required parameter `run_id` when calling `get_tasks_id_runs_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + if 'run_id' in local_var_params: + path_params['runID'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_tasks_id_runs_id_logs(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a run. + + Retrieves all logs for a task run. A log is a list of run events with `runID`, `time`, and `message` properties. Use this endpoint to help analyze task performance and troubleshoot failed task runs. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs_id_logs(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to get logs for. (required) + :param str run_id: The ID of the run to get logs for. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_tasks_id_runs_id_logs_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + else: + (data) = self.get_tasks_id_runs_id_logs_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + return data + + def get_tasks_id_runs_id_logs_with_http_info(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a run. + + Retrieves all logs for a task run. 
A log is a list of run events with `runID`, `time`, and `message` properties. Use this endpoint to help analyze task performance and troubleshoot failed task runs. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_tasks_id_runs_id_logs_with_http_info(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to get logs for. (required) + :param str run_id: The ID of the run to get logs for. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_id_logs_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}/logs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Logs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_tasks_id_runs_id_logs_async(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve all logs for a run. + + Retrieves all logs for a task run. A log is a list of run events with `runID`, `time`, and `message` properties. Use this endpoint to help analyze task performance and troubleshoot failed task runs. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to get logs for. (required) + :param str run_id: The ID of the run to get logs for. (required) + :param str zap_trace_span: OpenTracing span context + :return: Logs + If the method is called asynchronously, + returns the request thread. 
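+
+ Sketch for inspecting a run's log events (both IDs are placeholders):
+
+ >>> logs = await api.get_tasks_id_runs_id_logs_async('09a776832f381000', '09b070dadaa7d000')
+ >>> print(logs.events[-1].message if logs.events else 'no log events')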
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_tasks_id_runs_id_logs_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}/logs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Logs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_tasks_id_runs_id_logs_prepare(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'run_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_tasks_id_runs_id_logs', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs_id_logs`") # noqa: E501 + # verify the required parameter 'run_id' is set + if ('run_id' not in local_var_params or + local_var_params['run_id'] is None): + raise ValueError("Missing the required parameter `run_id` when calling `get_tasks_id_runs_id_logs`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + if 'run_id' in local_var_params: + path_params['runID'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_tasks_id(self, task_id, task_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a task. + + Updates a task and then cancels all scheduled runs of the task. Use this endpoint to set, modify, and clear task properties (for example: `cron`, `name`, `flux`, `status`). Once InfluxDB applies the update, it cancels all previously scheduled runs of the task. To update a task, pass an object that contains the updated key-value pairs. To activate or inactivate a task, set the `status` property. _`"status": "inactive"`_ cancels scheduled runs and prevents manual runs of the task. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_tasks_id(task_id, task_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to update. (required) + :param TaskUpdateRequest task_update_request: An object that contains updated task properties to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_tasks_id_with_http_info(task_id, task_update_request, **kwargs) # noqa: E501 + else: + (data) = self.patch_tasks_id_with_http_info(task_id, task_update_request, **kwargs) # noqa: E501 + return data + + def patch_tasks_id_with_http_info(self, task_id, task_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a task. + + Updates a task and then cancels all scheduled runs of the task. Use this endpoint to set, modify, and clear task properties (for example: `cron`, `name`, `flux`, `status`). Once InfluxDB applies the update, it cancels all previously scheduled runs of the task. To update a task, pass an object that contains the updated key-value pairs. To activate or inactivate a task, set the `status` property. _`"status": "inactive"`_ cancels scheduled runs and prevents manual runs of the task. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_tasks_id_with_http_info(task_id, task_update_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to update. (required) + :param TaskUpdateRequest task_update_request: An object that contains updated task properties to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_tasks_id_prepare(task_id, task_update_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_tasks_id_async(self, task_id, task_update_request, **kwargs): # noqa: E501,D401,D403 + """Update a task. + + Updates a task and then cancels all scheduled runs of the task. Use this endpoint to set, modify, and clear task properties (for example: `cron`, `name`, `flux`, `status`). Once InfluxDB applies the update, it cancels all previously scheduled runs of the task. To update a task, pass an object that contains the updated key-value pairs. To activate or inactivate a task, set the `status` property. _`"status": "inactive"`_ cancels scheduled runs and prevents manual runs of the task. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to update. (required) + :param TaskUpdateRequest task_update_request: An object that contains updated task properties to apply. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_tasks_id_prepare(task_id, task_update_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_tasks_id_prepare(self, task_id, task_update_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'task_update_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_tasks_id', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `patch_tasks_id`") # noqa: E501 + # verify the required parameter 'task_update_request' is set + if ('task_update_request' not in local_var_params or + local_var_params['task_update_request'] is None): + raise ValueError("Missing the required parameter `task_update_request` when calling `patch_tasks_id`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'task_update_request' in local_var_params: + body_params = local_var_params['task_update_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks(self, task_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a task. + + Creates a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) and returns the task. #### Related guides - [Get started with tasks](https://docs.influxdata.com/influxdb/latest/process-data/get-started/) - [Create a task](https://docs.influxdata.com/influxdb/latest/process-data/manage-tasks/create-task/) - [Common tasks](https://docs.influxdata.com/influxdb/latest/process-data/common-tasks/) - [Task configuration options](https://docs.influxdata.com/influxdb/latest/process-data/task-options/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks(task_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TaskCreateRequest task_create_request: The task to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_with_http_info(task_create_request, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_with_http_info(task_create_request, **kwargs) # noqa: E501 + return data + + def post_tasks_with_http_info(self, task_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a task. + + Creates a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) and returns the task. #### Related guides - [Get started with tasks](https://docs.influxdata.com/influxdb/latest/process-data/get-started/) - [Create a task](https://docs.influxdata.com/influxdb/latest/process-data/manage-tasks/create-task/) - [Common tasks](https://docs.influxdata.com/influxdb/latest/process-data/common-tasks/) - [Task configuration options](https://docs.influxdata.com/influxdb/latest/process-data/task-options/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_with_http_info(task_create_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TaskCreateRequest task_create_request: The task to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_prepare(task_create_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_async(self, task_create_request, **kwargs): # noqa: E501,D401,D403 + """Create a task. + + Creates a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) and returns the task. #### Related guides - [Get started with tasks](https://docs.influxdata.com/influxdb/latest/process-data/get-started/) - [Create a task](https://docs.influxdata.com/influxdb/latest/process-data/manage-tasks/create-task/) - [Common tasks](https://docs.influxdata.com/influxdb/latest/process-data/common-tasks/) - [Task configuration options](https://docs.influxdata.com/influxdb/latest/process-data/task-options/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param TaskCreateRequest task_create_request: The task to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: Task + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_prepare(task_create_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Task', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_prepare(self, task_create_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_create_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_tasks', all_params, local_var_params) + # verify the required parameter 'task_create_request' is set + if ('task_create_request' not in local_var_params or + local_var_params['task_create_request'] is None): + raise ValueError("Missing the required parameter `task_create_request` when calling `post_tasks`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'task_create_request' in local_var_params: + body_params = local_var_params['task_create_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks_id_labels(self, task_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a task. + + Adds a label to a task. Use this endpoint to add a label that you can use to filter tasks in the InfluxDB UI. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_labels(task_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_id_labels_with_http_info(task_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_id_labels_with_http_info(task_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_tasks_id_labels_with_http_info(self, task_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a task. + + Adds a label to a task. Use this endpoint to add a label that you can use to filter tasks in the InfluxDB UI. + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_labels_with_http_info(task_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The ID of the task to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_labels_prepare(task_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_id_labels_async(self, task_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a task. + + Adds a label to a task. Use this endpoint to add a label that you can use to filter tasks in the InfluxDB UI. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The ID of the task to label. (required) + :param LabelMapping label_mapping: An object that contains a _`labelID`_ to add to the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
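+
+ Sketch (assumes `LabelMapping` from the generated models; IDs are placeholders):
+
+ >>> from influxdb_client import LabelMapping
+ >>> mapping = LabelMapping(label_id='0a59d5df5d2f1000')
+ >>> label_response = await api.post_tasks_id_labels_async('09a776832f381000', mapping)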
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_labels_prepare(task_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_id_labels_prepare(self, task_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_tasks_id_labels', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_tasks_id_labels`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks_id_members(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Adds a user to members of a task and returns the member. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_members(task_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member of the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_id_members_with_http_info(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_id_members_with_http_info(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_tasks_id_members_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Adds a user to members of a task and returns the member. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_members_with_http_info(task_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member of the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_members_prepare(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_id_members_async(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Adds a user to members of a task and returns the member. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as a member of the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_members_prepare(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_id_members_prepare(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_tasks_id_members', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_tasks_id_members`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks_id_owners(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner for a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Assigns a task `owner` role to a user. Use this endpoint to create a _resource owner_ for the task. A _resource owner_ is a user with `role: owner` for a specific resource. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_owners(task_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner of the task. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_id_owners_with_http_info(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_id_owners_with_http_info(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_tasks_id_owners_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner for a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Assigns a task `owner` role to a user. Use this endpoint to create a _resource owner_ for the task. A _resource owner_ is a user with `role: owner` for a specific resource. + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_owners_with_http_info(task_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner of the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_owners_prepare(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_id_owners_async(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner for a task. + + **Deprecated**: Tasks don't use `owner` and `member` roles. Use [`/api/v2/authorizations`](#tag/Authorizations-(API-tokens)) to assign user permissions. Assigns a task `owner` role to a user. Use this endpoint to create a _resource owner_ for the task. A _resource owner_ is a user with `role: owner` for a specific resource. + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: The task ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: A user to add as an owner of the task. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
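+ A minimal sketch of this awaitable variant; the IDs below are hypothetical placeholders and `api` is assumed to be a TasksService backed by an asynchronous api client:
+
+ >>> from influxdb_client.domain import AddResourceMemberRequestBody
+ >>> body = AddResourceMemberRequestBody(id="0f1a2b3c4d5e6f70")
+ >>> owner = await api.post_tasks_id_owners_async("09cfb87051fbe000", body)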
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_owners_prepare(task_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_id_owners_prepare(self, task_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_tasks_id_owners', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_tasks_id_owners`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks_id_runs(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Start a task run, overriding the schedule. + + Schedules a task run to start immediately, ignoring scheduled runs. Use this endpoint to manually start a task run. Scheduled runs will continue to run as scheduled. This may result in concurrently running tasks. To _retry_ a previous run (and avoid creating a new run), use the [`POST /api/v2/tasks/{taskID}/runs/{runID}/retry` endpoint](#operation/PostTasksIDRunsIDRetry). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_runs(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: (required) + :param str zap_trace_span: OpenTracing span context + :param RunManually run_manually: + :return: Run + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_id_runs_with_http_info(task_id, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_id_runs_with_http_info(task_id, **kwargs) # noqa: E501 + return data + + def post_tasks_id_runs_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Start a task run, overriding the schedule. + + Schedules a task run to start immediately, ignoring scheduled runs. Use this endpoint to manually start a task run. Scheduled runs will continue to run as scheduled. This may result in concurrently running tasks. To _retry_ a previous run (and avoid creating a new run), use the [`POST /api/v2/tasks/{taskID}/runs/{runID}/retry` endpoint](#operation/PostTasksIDRunsIDRetry). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_runs_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: (required) + :param str zap_trace_span: OpenTracing span context + :param RunManually run_manually: + :return: Run + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_runs_prepare(task_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_id_runs_async(self, task_id, **kwargs): # noqa: E501,D401,D403 + """Start a task run, overriding the schedule. + + Schedules a task run to start immediately, ignoring scheduled runs. Use this endpoint to manually start a task run. Scheduled runs will continue to run as scheduled. This may result in concurrently running tasks. To _retry_ a previous run (and avoid creating a new run), use the [`POST /api/v2/tasks/{taskID}/runs/{runID}/retry` endpoint](#operation/PostTasksIDRunsIDRetry). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: (required) + :param str zap_trace_span: OpenTracing span context + :param RunManually run_manually: + :return: Run + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_runs_prepare(task_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_id_runs_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'zap_trace_span', 'run_manually'] # noqa: E501 + self._check_operation_params('post_tasks_id_runs', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_runs`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'run_manually' in local_var_params: + body_params = local_var_params['run_manually'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_tasks_id_runs_id_retry(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retry a task run. + + Queues a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run to retry and returns the scheduled run. To manually start a _new_ task run, use the [`POST /api/v2/tasks/{taskID}/runs` endpoint](#operation/PostTasksIDRuns). #### Limitations - The task must be _active_ (`status: "active"`). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_runs_id_retry(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Specifies the task to retry. (required) + :param str run_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run ID. Specifies the task run to retry. To find a task run ID, use the [`GET /api/v2/tasks/{taskID}/runs` endpoint](#operation/GetTasksIDRuns) to list task runs. (required) + :param str zap_trace_span: OpenTracing span context + :param str body: + :return: Run + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_tasks_id_runs_id_retry_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + else: + (data) = self.post_tasks_id_runs_id_retry_with_http_info(task_id, run_id, **kwargs) # noqa: E501 + return data + + def post_tasks_id_runs_id_retry_with_http_info(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retry a task run. + + Queues a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run to retry and returns the scheduled run. To manually start a _new_ task run, use the [`POST /api/v2/tasks/{taskID}/runs` endpoint](#operation/PostTasksIDRuns). #### Limitations - The task must be _active_ (`status: "active"`). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_tasks_id_runs_id_retry_with_http_info(task_id, run_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Specifies the task to retry. (required) + :param str run_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run ID. Specifies the task run to retry. To find a task run ID, use the [`GET /api/v2/tasks/{taskID}/runs` endpoint](#operation/GetTasksIDRuns) to list task runs. (required) + :param str zap_trace_span: OpenTracing span context + :param str body: + :return: Run + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_runs_id_retry_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}/retry', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_tasks_id_runs_id_retry_async(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + """Retry a task run. + + Queues a [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run to retry and returns the scheduled run. To manually start a _new_ task run, use the [`POST /api/v2/tasks/{taskID}/runs` endpoint](#operation/PostTasksIDRuns). #### Limitations - The task must be _active_ (`status: "active"`). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str task_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) ID. Specifies the task to retry. (required) + :param str run_id: A [task](https://docs.influxdata.com/influxdb/latest/reference/glossary/#task) run ID. Specifies the task run to retry. To find a task run ID, use the [`GET /api/v2/tasks/{taskID}/runs` endpoint](#operation/GetTasksIDRuns) to list task runs. (required) + :param str zap_trace_span: OpenTracing span context + :param str body: + :return: Run + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_tasks_id_runs_id_retry_prepare(task_id, run_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/tasks/{taskID}/runs/{runID}/retry', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Run', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_tasks_id_runs_id_retry_prepare(self, task_id, run_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['task_id', 'run_id', 'zap_trace_span', 'body'] # noqa: E501 + self._check_operation_params('post_tasks_id_runs_id_retry', all_params, local_var_params) + # verify the required parameter 'task_id' is set + if ('task_id' not in local_var_params or + local_var_params['task_id'] is None): + raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_runs_id_retry`") # noqa: E501 + # verify the required parameter 'run_id' is set + if ('run_id' not in local_var_params or + local_var_params['run_id'] is None): + raise ValueError("Missing the required parameter `run_id` when calling `post_tasks_id_runs_id_retry`") # noqa: E501 + + path_params = {} + if 'task_id' in local_var_params: + path_params['taskID'] = local_var_params['task_id'] # noqa: E501 + if 'run_id' in local_var_params: + path_params['runID'] = local_var_params['run_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json; charset=utf-8']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/telegraf_plugins_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/telegraf_plugins_service.py new file mode 100644 index 0000000..905fdd3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/telegraf_plugins_service.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class TelegrafPluginsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """TelegrafPluginsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_telegraf_plugins(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf plugins. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegraf_plugins(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str type: The type of plugin desired. + :return: TelegrafPlugins + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegraf_plugins_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_telegraf_plugins_with_http_info(**kwargs) # noqa: E501 + return data + + def get_telegraf_plugins_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf plugins. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegraf_plugins_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str type: The type of plugin desired. + :return: TelegrafPlugins + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegraf_plugins_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegraf/plugins', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='TelegrafPlugins', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegraf_plugins_async(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf plugins. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str type: The type of plugin desired. + :return: TelegrafPlugins + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegraf_plugins_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegraf/plugins', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='TelegrafPlugins', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegraf_plugins_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'type'] # noqa: E501 + self._check_operation_params('get_telegraf_plugins', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'type' in local_var_params: + query_params.append(('type', local_var_params['type'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/telegrafs_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/telegrafs_service.py new file mode 100644 index 0000000..4c3bfe9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/telegrafs_service.py @@ -0,0 +1,1738 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class TelegrafsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """TelegrafsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_telegrafs_id(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Delete a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_telegrafs_id_with_http_info(telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_telegrafs_id_with_http_info(telegraf_id, **kwargs) # noqa: E501 + return data + + def delete_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Delete a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_with_http_info(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_prepare(telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_telegrafs_id_async(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Delete a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_prepare(telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_telegrafs_id_prepare(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_telegrafs_id', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `delete_telegrafs_id`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_telegrafs_id_labels_id(self, telegraf_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_labels_id(telegraf_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_telegrafs_id_labels_id_with_http_info(telegraf_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_telegrafs_id_labels_id_with_http_info(telegraf_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_telegrafs_id_labels_id_with_http_info(self, telegraf_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_labels_id_with_http_info(telegraf_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_labels_id_prepare(telegraf_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_telegrafs_id_labels_id_async(self, telegraf_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str label_id: The label ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_labels_id_prepare(telegraf_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_telegrafs_id_labels_id_prepare(self, telegraf_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_telegrafs_id_labels_id', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `delete_telegrafs_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_telegrafs_id_labels_id`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def 
delete_telegrafs_id_members_id(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_members_id(user_id, telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_telegrafs_id_members_id_with_http_info(user_id, telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_telegrafs_id_members_id_with_http_info(user_id, telegraf_id, **kwargs) # noqa: E501 + return data + + def delete_telegrafs_id_members_id_with_http_info(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_members_id_with_http_info(user_id, telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_members_id_prepare(user_id, telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_telegrafs_id_members_id_async(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove a member from a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the member to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
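+ A minimal sketch of this awaitable variant; the member's user ID (first argument) and the configuration ID are hypothetical placeholders, and `api` is assumed to be a TelegrafsService backed by an asynchronous api client:
+
+ >>> await api.delete_telegrafs_id_members_id_async("0f1a2b3c4d5e6f70", "0a1b2c3d4e5f6071")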
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_members_id_prepare(user_id, telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_telegrafs_id_members_id_prepare(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_telegrafs_id_members_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_telegrafs_id_members_id`") # noqa: E501 + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `delete_telegrafs_id_members_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_telegrafs_id_owners_id(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_owners_id(user_id, telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_telegrafs_id_owners_id_with_http_info(user_id, telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_telegrafs_id_owners_id_with_http_info(user_id, telegraf_id, **kwargs) # noqa: E501 + return data + + def delete_telegrafs_id_owners_id_with_http_info(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a Telegraf config. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_telegrafs_id_owners_id_with_http_info(user_id, telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_owners_id_prepare(user_id, telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_telegrafs_id_owners_id_async(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Remove an owner from a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the owner to remove. (required) + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_telegrafs_id_owners_id_prepare(user_id, telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_telegrafs_id_owners_id_prepare(self, user_id, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_telegrafs_id_owners_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_telegrafs_id_owners_id`") # noqa: E501 + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `delete_telegrafs_id_owners_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + if 'telegraf_id' in local_var_params: + 
path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_telegrafs(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf configurations. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID the Telegraf config belongs to. + :return: Telegrafs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegrafs_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_telegrafs_with_http_info(**kwargs) # noqa: E501 + return data + + def get_telegrafs_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf configurations. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID the Telegraf config belongs to. + :return: Telegrafs + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegrafs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegrafs_async(self, **kwargs): # noqa: E501,D401,D403 + """List all Telegraf configurations. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org_id: The organization ID the Telegraf config belongs to. + :return: Telegrafs + If the method is called asynchronously, + returns the request thread. 
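+ A minimal sketch of this awaitable variant; the organization ID is a hypothetical placeholder and `api` is assumed to be a TelegrafsService backed by an asynchronous api client:
+
+ >>> telegrafs = await api.get_telegrafs_async(org_id="48c88459ee424a04")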
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegrafs', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegrafs_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'org_id'] # noqa: E501 + self._check_operation_params('get_telegrafs', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_telegrafs_id(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept: + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegrafs_id_with_http_info(telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.get_telegrafs_id_with_http_info(telegraf_id, **kwargs) # noqa: E501 + return data + + def get_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_with_http_info(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept: + :return: str + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_prepare(telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegrafs_id_async(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :param str accept: + :return: str + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_prepare(telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='str', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegrafs_id_prepare(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'zap_trace_span', 'accept'] # noqa: E501 + self._check_operation_params('get_telegrafs_id', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `get_telegrafs_id`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'accept' in local_var_params: + header_params['Accept'] = local_var_params['accept'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/toml', 'application/json', 'application/octet-stream']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_telegrafs_id_labels(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a Telegraf config. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_labels(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegrafs_id_labels_with_http_info(telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.get_telegrafs_id_labels_with_http_info(telegraf_id, **kwargs) # noqa: E501 + return data + + def get_telegrafs_id_labels_with_http_info(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_labels_with_http_info(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_labels_prepare(telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegrafs_id_labels_async(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
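+
+        A minimal awaitable sketch (requires a running event loop; ``service``
+        and ``telegraf_id`` are assumed to exist already):
+        >>> labels_response = await service.get_telegrafs_id_labels_async(telegraf_id)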
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_labels_prepare(telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegrafs_id_labels_prepare(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_telegrafs_id_labels', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `get_telegrafs_id_labels`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_telegrafs_id_members(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_members(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegrafs_id_members_with_http_info(telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.get_telegrafs_id_members_with_http_info(telegraf_id, **kwargs) # noqa: E501 + return data + + def get_telegrafs_id_members_with_http_info(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_members_with_http_info(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_members_prepare(telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegrafs_id_members_async(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all users with member privileges for a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMembers + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_members_prepare(telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMembers', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegrafs_id_members_prepare(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_telegrafs_id_members', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `get_telegrafs_id_members`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_telegrafs_id_owners(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_owners(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_telegrafs_id_owners_with_http_info(telegraf_id, **kwargs) # noqa: E501 + else: + (data) = self.get_telegrafs_id_owners_with_http_info(telegraf_id, **kwargs) # noqa: E501 + return data + + def get_telegrafs_id_owners_with_http_info(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_telegrafs_id_owners_with_http_info(telegraf_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_owners_prepare(telegraf_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_telegrafs_id_owners_async(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + """List all owners of a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwners + If the method is called asynchronously, + returns the request thread. 
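+
+        A minimal awaitable sketch (requires a running event loop; ``service``
+        and ``telegraf_id`` are assumed to exist already):
+        >>> owners = await service.get_telegrafs_id_owners_async(telegraf_id)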
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_telegrafs_id_owners_prepare(telegraf_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwners', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_telegrafs_id_owners_prepare(self, telegraf_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_telegrafs_id_owners', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `get_telegrafs_id_owners`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_telegrafs(self, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Create a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs(telegraf_plugin_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_telegrafs_with_http_info(telegraf_plugin_request, **kwargs) # noqa: E501 + else: + (data) = self.post_telegrafs_with_http_info(telegraf_plugin_request, **kwargs) # noqa: E501 + return data + + def post_telegrafs_with_http_info(self, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Create a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_with_http_info(telegraf_plugin_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_prepare(telegraf_plugin_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegraf', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_telegrafs_async(self, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Create a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_prepare(telegraf_plugin_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegraf', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_telegrafs_prepare(self, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_plugin_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_telegrafs', all_params, local_var_params) + # verify the required parameter 'telegraf_plugin_request' is set + if ('telegraf_plugin_request' not in local_var_params or + local_var_params['telegraf_plugin_request'] is None): + raise ValueError("Missing the required parameter `telegraf_plugin_request` when calling `post_telegrafs`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'telegraf_plugin_request' in local_var_params: + body_params = local_var_params['telegraf_plugin_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_telegrafs_id_labels(self, telegraf_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a Telegraf config. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_labels(telegraf_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_telegrafs_id_labels_with_http_info(telegraf_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_telegrafs_id_labels_with_http_info(telegraf_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_telegrafs_id_labels_with_http_info(self, telegraf_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_labels_with_http_info(telegraf_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_labels_prepare(telegraf_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_telegrafs_id_labels_async(self, telegraf_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. 
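+
+        A minimal awaitable sketch (``mapping`` is assumed to be a prepared
+        ``LabelMapping`` referencing an existing label ID):
+        >>> label_response = await service.post_telegrafs_id_labels_async(telegraf_id, mapping)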
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_labels_prepare(telegraf_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_telegrafs_id_labels_prepare(self, telegraf_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_telegrafs_id_labels', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `post_telegrafs_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_telegrafs_id_labels`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_telegrafs_id_members(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_members(telegraf_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_telegrafs_id_members_with_http_info(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_telegrafs_id_members_with_http_info(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_telegrafs_id_members_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a Telegraf config. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_members_with_http_info(telegraf_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_members_prepare(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_telegrafs_id_members_async(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add a member to a Telegraf config. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceMember + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_members_prepare(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/members', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceMember', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_telegrafs_id_members_prepare(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_telegrafs_id_members', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `post_telegrafs_id_members`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_telegrafs_id_members`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_telegrafs_id_owners(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_owners(telegraf_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_telegrafs_id_owners_with_http_info(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + else: + (data) = self.post_telegrafs_id_owners_with_http_info(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + return data + + def post_telegrafs_id_owners_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_telegrafs_id_owners_with_http_info(telegraf_id, add_resource_member_request_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_owners_prepare(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_telegrafs_id_owners_async(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + """Add an owner to a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf configuration ID. (required) + :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required) + :param str zap_trace_span: OpenTracing span context + :return: ResourceOwner + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_telegrafs_id_owners_prepare(telegraf_id, add_resource_member_request_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}/owners', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ResourceOwner', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_telegrafs_id_owners_prepare(self, telegraf_id, add_resource_member_request_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'add_resource_member_request_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_telegrafs_id_owners', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `post_telegrafs_id_owners`") # noqa: E501 + # verify the required parameter 'add_resource_member_request_body' is set + if ('add_resource_member_request_body' not in local_var_params or + local_var_params['add_resource_member_request_body'] is None): + raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_telegrafs_id_owners`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'add_resource_member_request_body' in local_var_params: + body_params = local_var_params['add_resource_member_request_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_telegrafs_id(self, telegraf_id, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Update a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_telegrafs_id(telegraf_id, telegraf_plugin_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_telegrafs_id_with_http_info(telegraf_id, telegraf_plugin_request, **kwargs) # noqa: E501 + else: + (data) = self.put_telegrafs_id_with_http_info(telegraf_id, telegraf_plugin_request, **kwargs) # noqa: E501 + return data + + def put_telegrafs_id_with_http_info(self, telegraf_id, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Update a Telegraf configuration. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_telegrafs_id_with_http_info(telegraf_id, telegraf_plugin_request, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_telegrafs_id_prepare(telegraf_id, telegraf_plugin_request, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegraf', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_telegrafs_id_async(self, telegraf_id, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + """Update a Telegraf configuration. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str telegraf_id: The Telegraf config ID. (required) + :param TelegrafPluginRequest telegraf_plugin_request: Telegraf configuration update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Telegraf + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_telegrafs_id_prepare(telegraf_id, telegraf_plugin_request, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/telegrafs/{telegrafID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Telegraf', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_telegrafs_id_prepare(self, telegraf_id, telegraf_plugin_request, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['telegraf_id', 'telegraf_plugin_request', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_telegrafs_id', all_params, local_var_params) + # verify the required parameter 'telegraf_id' is set + if ('telegraf_id' not in local_var_params or + local_var_params['telegraf_id'] is None): + raise ValueError("Missing the required parameter `telegraf_id` when calling `put_telegrafs_id`") # noqa: E501 + # verify the required parameter 'telegraf_plugin_request' is set + if ('telegraf_plugin_request' not in local_var_params or + local_var_params['telegraf_plugin_request'] is None): + raise ValueError("Missing the required parameter `telegraf_plugin_request` when calling `put_telegrafs_id`") # noqa: E501 + + path_params = {} + if 'telegraf_id' in local_var_params: + path_params['telegrafID'] = local_var_params['telegraf_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'telegraf_plugin_request' in local_var_params: + body_params = local_var_params['telegraf_plugin_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/templates_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/templates_service.py new file mode 100644 index 0000000..92780a0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/templates_service.py @@ -0,0 +1,959 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class TemplatesService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """TemplatesService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def apply_template(self, template_apply, **kwargs): # noqa: E501,D401,D403 + """Apply or dry-run a template. + + Applies a template to create or update a [stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) of InfluxDB [resources](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/export/all/#resources). The response contains the diff of changes and the stack ID. Use this endpoint to install an InfluxDB template to an organization. Provide template URLs or template objects in your request. To customize which template resources are installed, use the `actions` parameter. By default, when you apply a template, InfluxDB installs the template to create and update stack resources and then generates a diff of the changes. If you pass `dryRun: true` in the request body, InfluxDB validates the template and generates the resource diff, but doesn’t make any changes to your instance. #### Custom values for templates - Some templates may contain [environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/create/#include-user-definable-resource-names) for custom metadata. To provide custom values for environment references, pass the _`envRefs`_ property in the request body. For more information and examples, see how to [define environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#define-environment-references). - Some templates may contain queries that use [secrets](https://docs.influxdata.com/influxdb/latest/security/secrets/). To provide custom secret values, pass the _`secrets`_ property in the request body. Don't expose secret values in templates. For more information, see [how to pass secrets when installing a template](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#pass-secrets-when-installing-a-template). #### Required permissions - `write` permissions for resource types in the template. #### Rate limits (with InfluxDB Cloud) - Adjustable service quotas apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Use templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/) - [Stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.apply_template(template_apply, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TemplateApply template_apply: Parameters for applying templates. (required) + :return: TemplateSummary + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.apply_template_with_http_info(template_apply, **kwargs) # noqa: E501 + else: + (data) = self.apply_template_with_http_info(template_apply, **kwargs) # noqa: E501 + return data + + def apply_template_with_http_info(self, template_apply, **kwargs): # noqa: E501,D401,D403 + """Apply or dry-run a template. 
+ + Applies a template to create or update a [stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) of InfluxDB [resources](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/export/all/#resources). The response contains the diff of changes and the stack ID. Use this endpoint to install an InfluxDB template to an organization. Provide template URLs or template objects in your request. To customize which template resources are installed, use the `actions` parameter. By default, when you apply a template, InfluxDB installs the template to create and update stack resources and then generates a diff of the changes. If you pass `dryRun: true` in the request body, InfluxDB validates the template and generates the resource diff, but doesn’t make any changes to your instance. #### Custom values for templates - Some templates may contain [environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/create/#include-user-definable-resource-names) for custom metadata. To provide custom values for environment references, pass the _`envRefs`_ property in the request body. For more information and examples, see how to [define environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#define-environment-references). - Some templates may contain queries that use [secrets](https://docs.influxdata.com/influxdb/latest/security/secrets/). To provide custom secret values, pass the _`secrets`_ property in the request body. Don't expose secret values in templates. For more information, see [how to pass secrets when installing a template](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#pass-secrets-when-installing-a-template). #### Required permissions - `write` permissions for resource types in the template. #### Rate limits (with InfluxDB Cloud) - Adjustable service quotas apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Use templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/) - [Stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.apply_template_with_http_info(template_apply, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TemplateApply template_apply: Parameters for applying templates. (required) + :return: TemplateSummary + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._apply_template_prepare(template_apply, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/templates/apply', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='TemplateSummary', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def apply_template_async(self, template_apply, **kwargs): # noqa: E501,D401,D403 + """Apply or dry-run a template. 
+ + Applies a template to create or update a [stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) of InfluxDB [resources](https://docs.influxdata.com/influxdb/latest/reference/cli/influx/export/all/#resources). The response contains the diff of changes and the stack ID. Use this endpoint to install an InfluxDB template to an organization. Provide template URLs or template objects in your request. To customize which template resources are installed, use the `actions` parameter. By default, when you apply a template, InfluxDB installs the template to create and update stack resources and then generates a diff of the changes. If you pass `dryRun: true` in the request body, InfluxDB validates the template and generates the resource diff, but doesn’t make any changes to your instance. #### Custom values for templates - Some templates may contain [environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/create/#include-user-definable-resource-names) for custom metadata. To provide custom values for environment references, pass the _`envRefs`_ property in the request body. For more information and examples, see how to [define environment references](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#define-environment-references). - Some templates may contain queries that use [secrets](https://docs.influxdata.com/influxdb/latest/security/secrets/). To provide custom secret values, pass the _`secrets`_ property in the request body. Don't expose secret values in templates. For more information, see [how to pass secrets when installing a template](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#pass-secrets-when-installing-a-template). #### Required permissions - `write` permissions for resource types in the template. #### Rate limits (with InfluxDB Cloud) - Adjustable service quotas apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Use templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/) - [Stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param TemplateApply template_apply: Parameters for applying templates. (required) + :return: TemplateSummary + If the method is called asynchronously, + returns the request thread. 
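+
+        A minimal awaitable sketch (assumes a running event loop and a
+        ``TemplatesService`` instance ``service``; ``template_apply`` is a
+        prepared ``TemplateApply`` body, with its dry-run flag set to
+        validate the template without changing the instance):
+        >>> summary = await service.apply_template_async(template_apply)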
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._apply_template_prepare(template_apply, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/templates/apply', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='TemplateSummary', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _apply_template_prepare(self, template_apply, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['template_apply'] # noqa: E501 + self._check_operation_params('apply_template', all_params, local_var_params) + # verify the required parameter 'template_apply' is set + if ('template_apply' not in local_var_params or + local_var_params['template_apply'] is None): + raise ValueError("Missing the required parameter `template_apply` when calling `apply_template`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'template_apply' in local_var_params: + body_params = local_var_params['template_apply'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json', 'application/x-jsonnet', 'text/yml']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def create_stack(self, **kwargs): # noqa: E501,D401,D403 + """Create a stack. + + Creates or initializes a stack. Use this endpoint to _manually_ initialize a new stack with the following optional information: - Stack name - Stack description - URLs for template manifest files To automatically create a stack when applying templates, use the [/api/v2/templates/apply endpoint](#operation/ApplyTemplate). #### Required permissions - `write` permission for the organization #### Related guides - [Initialize an InfluxDB stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/init/). - [Use InfluxDB templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#apply-templates-to-an-influxdb-instance). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_stack(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostStackRequest post_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.create_stack_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.create_stack_with_http_info(**kwargs) # noqa: E501 + return data + + def create_stack_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Create a stack. + + Creates or initializes a stack. 
Use this endpoint to _manually_ initialize a new stack with the following optional information: - Stack name - Stack description - URLs for template manifest files To automatically create a stack when applying templates, use the [/api/v2/templates/apply endpoint](#operation/ApplyTemplate). #### Required permissions - `write` permission for the organization #### Related guides - [Initialize an InfluxDB stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/init/). - [Use InfluxDB templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#apply-templates-to-an-influxdb-instance). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.create_stack_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PostStackRequest post_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_stack_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def create_stack_async(self, **kwargs): # noqa: E501,D401,D403 + """Create a stack. + + Creates or initializes a stack. Use this endpoint to _manually_ initialize a new stack with the following optional information: - Stack name - Stack description - URLs for template manifest files To automatically create a stack when applying templates, use the [/api/v2/templates/apply endpoint](#operation/ApplyTemplate). #### Required permissions - `write` permission for the organization #### Related guides - [Initialize an InfluxDB stack](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/init/). - [Use InfluxDB templates](https://docs.influxdata.com/influxdb/latest/influxdb-templates/use/#apply-templates-to-an-influxdb-instance). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PostStackRequest post_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. 
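+
+        A minimal awaitable sketch (``body`` is assumed to be a prepared
+        ``PostStackRequest`` naming the organization):
+        >>> stack = await service.create_stack_async(post_stack_request=body)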
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._create_stack_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _create_stack_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['post_stack_request'] # noqa: E501 + self._check_operation_params('create_stack', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'post_stack_request' in local_var_params: + body_params = local_var_params['post_stack_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_stack(self, stack_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete a stack and associated resources. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_stack(stack_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :param str org_id: The identifier of the organization. (required) + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_stack_with_http_info(stack_id, org_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_stack_with_http_info(stack_id, org_id, **kwargs) # noqa: E501 + return data + + def delete_stack_with_http_info(self, stack_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete a stack and associated resources. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_stack_with_http_info(stack_id, org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :param str org_id: The identifier of the organization. (required) + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_stack_prepare(stack_id, org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_stack_async(self, stack_id, org_id, **kwargs): # noqa: E501,D401,D403 + """Delete a stack and associated resources. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :param str org_id: The identifier of the organization. (required) + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_stack_prepare(stack_id, org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_stack_prepare(self, stack_id, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['stack_id', 'org_id'] # noqa: E501 + self._check_operation_params('delete_stack', all_params, local_var_params) + # verify the required parameter 'stack_id' is set + if ('stack_id' not in local_var_params or + local_var_params['stack_id'] is None): + raise ValueError("Missing the required parameter `stack_id` when calling `delete_stack`") # noqa: E501 + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `delete_stack`") # noqa: E501 + + path_params = {} + if 'stack_id' in local_var_params: + path_params['stack_id'] = local_var_params['stack_id'] # noqa: E501 + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def export_template(self, **kwargs): # noqa: E501,D401,D403 + """Export a new template. + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.export_template(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TemplateExportByID template_export_by_id: Export resources as an InfluxDB template. + :return: list[object] + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.export_template_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.export_template_with_http_info(**kwargs) # noqa: E501 + return data + + def export_template_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Export a new template. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.export_template_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param TemplateExportByID template_export_by_id: Export resources as an InfluxDB template. + :return: list[object] + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._export_template_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/templates/export', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='list[object]', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def export_template_async(self, **kwargs): # noqa: E501,D401,D403 + """Export a new template. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param TemplateExportByID template_export_by_id: Export resources as an InfluxDB template. + :return: list[object] + If the method is called asynchronously, + returns the request thread. 
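+
+        Example (editorial sketch; call from within a coroutine. The
+        ``TemplateExportByID`` import path and its ``stack_id`` field are
+        assumptions -- check the model before relying on them):
+
+        >>> from influxdb_client.domain.template_export_by_id import TemplateExportByID
+        >>> resources = await api.export_template_async(
+        ...     template_export_by_id=TemplateExportByID(stack_id='09bd87cd33be3000'))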
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._export_template_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/templates/export', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='list[object]', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _export_template_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['template_export_by_id'] # noqa: E501 + self._check_operation_params('export_template', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + + body_params = None + if 'template_export_by_id' in local_var_params: + body_params = local_var_params['template_export_by_id'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json', 'application/x-yaml']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def list_stacks(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List installed stacks. + + Lists installed InfluxDB stacks. To limit stacks in the response, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all installed stacks for the organization. #### Related guides - [View InfluxDB stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_stacks(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: An organization ID. Only returns stacks owned by the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). #### InfluxDB Cloud - Doesn't require this parameter; InfluxDB only returns resources allowed by the API token. (required) + :param str name: A stack name. Finds stack `events` with this name and returns the stacks. Repeatable. To filter for more than one stack name, repeat this parameter with each name--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&name=project-stack-0&name=project-stack-1` + :param str stack_id: A stack ID. Only returns the specified stack. Repeatable. To filter for more than one stack ID, repeat this parameter with each ID--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&stackID=09bd87cd33be3000&stackID=09bef35081fe3000` + :return: ListStacksResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_stacks_with_http_info(org_id, **kwargs) # noqa: E501 + else: + (data) = self.list_stacks_with_http_info(org_id, **kwargs) # noqa: E501 + return data + + def list_stacks_with_http_info(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List installed stacks. 
+ + Lists installed InfluxDB stacks. To limit stacks in the response, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all installed stacks for the organization. #### Related guides - [View InfluxDB stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_stacks_with_http_info(org_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org_id: An organization ID. Only returns stacks owned by the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). #### InfluxDB Cloud - Doesn't require this parameter; InfluxDB only returns resources allowed by the API token. (required) + :param str name: A stack name. Finds stack `events` with this name and returns the stacks. Repeatable. To filter for more than one stack name, repeat this parameter with each name--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&name=project-stack-0&name=project-stack-1` + :param str stack_id: A stack ID. Only returns the specified stack. Repeatable. To filter for more than one stack ID, repeat this parameter with each ID--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&stackID=09bd87cd33be3000&stackID=09bef35081fe3000` + :return: ListStacksResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._list_stacks_prepare(org_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ListStacksResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def list_stacks_async(self, org_id, **kwargs): # noqa: E501,D401,D403 + """List installed stacks. + + Lists installed InfluxDB stacks. To limit stacks in the response, pass query parameters in your request. If no query parameters are passed, InfluxDB returns all installed stacks for the organization. #### Related guides - [View InfluxDB stacks](https://docs.influxdata.com/influxdb/latest/influxdb-templates/stacks/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org_id: An organization ID. Only returns stacks owned by the specified [organization](https://docs.influxdata.com/influxdb/latest/reference/glossary/#organization). #### InfluxDB Cloud - Doesn't require this parameter; InfluxDB only returns resources allowed by the API token. (required) + :param str name: A stack name. Finds stack `events` with this name and returns the stacks. Repeatable. To filter for more than one stack name, repeat this parameter with each name--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&name=project-stack-0&name=project-stack-1` + :param str stack_id: A stack ID. Only returns the specified stack. Repeatable. 
To filter for more than one stack ID, repeat this parameter with each ID--for example: - `INFLUX_URL/api/v2/stacks?&orgID=INFLUX_ORG_ID&stackID=09bd87cd33be3000&stackID=09bef35081fe3000` + :return: ListStacksResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._list_stacks_prepare(org_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='ListStacksResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _list_stacks_prepare(self, org_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org_id', 'name', 'stack_id'] # noqa: E501 + self._check_operation_params('list_stacks', all_params, local_var_params) + # verify the required parameter 'org_id' is set + if ('org_id' not in local_var_params or + local_var_params['org_id'] is None): + raise ValueError("Missing the required parameter `org_id` when calling `list_stacks`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'stack_id' in local_var_params: + query_params.append(('stackID', local_var_params['stack_id'])) # noqa: E501 + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def read_stack(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a stack. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.read_stack(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.read_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + else: + (data) = self.read_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + return data + + def read_stack_with_http_info(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a stack. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.read_stack_with_http_info(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. 
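+
+        Example (editorial sketch using the plain ``read_stack`` wrapper,
+        which returns only the deserialized ``Stack``; the ID reuses the
+        sample stack ID from the ``list_stacks`` documentation):
+
+        >>> stack = api.read_stack('09bd87cd33be3000')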
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._read_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def read_stack_async(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a stack. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._read_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _read_stack_prepare(self, stack_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['stack_id'] # noqa: E501 + self._check_operation_params('read_stack', all_params, local_var_params) + # verify the required parameter 'stack_id' is set + if ('stack_id' not in local_var_params or + local_var_params['stack_id'] is None): + raise ValueError("Missing the required parameter `stack_id` when calling `read_stack`") # noqa: E501 + + path_params = {} + if 'stack_id' in local_var_params: + path_params['stack_id'] = local_var_params['stack_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def uninstall_stack(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Uninstall a stack. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.uninstall_stack(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.uninstall_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + else: + (data) = self.uninstall_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + return data + + def uninstall_stack_with_http_info(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Uninstall a stack. 
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.uninstall_stack_with_http_info(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._uninstall_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks/{stack_id}/uninstall', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def uninstall_stack_async(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Uninstall a stack. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._uninstall_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks/{stack_id}/uninstall', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _uninstall_stack_prepare(self, stack_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['stack_id'] # noqa: E501 + self._check_operation_params('uninstall_stack', all_params, local_var_params) + # verify the required parameter 'stack_id' is set + if ('stack_id' not in local_var_params or + local_var_params['stack_id'] is None): + raise ValueError("Missing the required parameter `stack_id` when calling `uninstall_stack`") # noqa: E501 + + path_params = {} + if 'stack_id' in local_var_params: + path_params['stack_id'] = local_var_params['stack_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def update_stack(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Update a stack. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.update_stack(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. 
(required) + :param PatchStackRequest patch_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.update_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + else: + (data) = self.update_stack_with_http_info(stack_id, **kwargs) # noqa: E501 + return data + + def update_stack_with_http_info(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Update a stack. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.update_stack_with_http_info(stack_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :param PatchStackRequest patch_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._update_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def update_stack_async(self, stack_id, **kwargs): # noqa: E501,D401,D403 + """Update a stack. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str stack_id: The identifier of the stack. (required) + :param PatchStackRequest patch_stack_request: + :return: Stack + If the method is called asynchronously, + returns the request thread. 
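+
+        Example (editorial sketch; call from within a coroutine. The
+        ``PatchStackRequest`` import path and its ``name`` keyword are
+        assumptions -- verify them against your installed models):
+
+        >>> from influxdb_client.domain.patch_stack_request import PatchStackRequest
+        >>> response = await api.update_stack_async(
+        ...     '09bd87cd33be3000',
+        ...     patch_stack_request=PatchStackRequest(name='project-stack-1'))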
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._update_stack_prepare(stack_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/stacks/{stack_id}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Stack', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _update_stack_prepare(self, stack_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['stack_id', 'patch_stack_request'] # noqa: E501 + self._check_operation_params('update_stack', all_params, local_var_params) + # verify the required parameter 'stack_id' is set + if ('stack_id' not in local_var_params or + local_var_params['stack_id'] is None): + raise ValueError("Missing the required parameter `stack_id` when calling `update_stack`") # noqa: E501 + + path_params = {} + if 'stack_id' in local_var_params: + path_params['stack_id'] = local_var_params['stack_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + body_params = None + if 'patch_stack_request' in local_var_params: + body_params = local_var_params['patch_stack_request'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/users_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/users_service.py new file mode 100644 index 0000000..b532324 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/users_service.py @@ -0,0 +1,1168 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class UsersService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """UsersService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def delete_users_id(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Delete a user. + + Deletes a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Delete a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to delete. 
#### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_users_id(user_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_users_id_with_http_info(user_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_users_id_with_http_info(user_id, **kwargs) # noqa: E501 + return data + + def delete_users_id_with_http_info(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Delete a user. + + Deletes a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Delete a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to delete. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_users_id_with_http_info(user_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_users_id_prepare(user_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_users_id_async(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Delete a user. + + Deletes a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Delete a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to delete. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to delete. 
(required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_users_id_prepare(user_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users/{userID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_users_id_prepare(self, user_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_users_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `delete_users_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_me(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the currently authenticated user. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_me(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_me_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_me_with_http_info(**kwargs) # noqa: E501 + return data + + def get_me_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the currently authenticated user. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_me_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
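+
+        Example (editorial sketch using the plain ``get_me`` wrapper, which
+        returns only the deserialized body; assumes the api_client is
+        authenticated and that ``UserResponse`` exposes a ``name`` attribute):
+
+        >>> me = api.get_me()
+        >>> me.name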
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_me_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/me', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_me_async(self, **kwargs): # noqa: E501,D401,D403 + """Retrieve the currently authenticated user. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_me_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/me', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_me_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span'] # noqa: E501 + self._check_operation_params('get_me', all_params, local_var_params) + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_users(self, **kwargs): # noqa: E501,D401,D403 + """List users. + + Lists [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). Default limit is `20`. To limit which users are returned, pass query parameters in your request. #### Required permissions for InfluxDB OSS | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | List all users | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | | List a specific user | `read-users` or `read-user USER_ID` | | *`USER_ID`* is the ID of the user that you want to retrieve. #### Related guides - [View users](https://docs.influxdata.com/influxdb/latest/users/view-users/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_users(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. 
For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str name: A user name. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str id: A user ID. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :return: Users + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_users_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_users_with_http_info(**kwargs) # noqa: E501 + return data + + def get_users_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List users. + + Lists [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). Default limit is `20`. To limit which users are returned, pass query parameters in your request. #### Required permissions for InfluxDB OSS | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | List all users | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | | List a specific user | `read-users` or `read-user USER_ID` | | *`USER_ID`* is the ID of the user that you want to retrieve. #### Related guides - [View users](https://docs.influxdata.com/influxdb/latest/users/view-users/). + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_users_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str name: A user name. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str id: A user ID. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :return: Users + If the method is called asynchronously, + returns the request thread. 
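+
+        Example (editorial sketch using the plain ``get_users`` wrapper;
+        pages through users with the ``offset``/``limit`` parameters
+        described above):
+
+        >>> first_page = api.get_users(limit=20, offset=0)
+        >>> second_page = api.get_users(limit=20, offset=20)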
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_users_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Users', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_users_async(self, **kwargs): # noqa: E501,D401,D403 + """List users. + + Lists [users](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). Default limit is `20`. To limit which users are returned, pass query parameters in your request. #### Required permissions for InfluxDB OSS | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | List all users | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | | List a specific user | `read-users` or `read-user USER_ID` | | *`USER_ID`* is the ID of the user that you want to retrieve. #### Related guides - [View users](https://docs.influxdata.com/influxdb/latest/users/view-users/). + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param int offset: The offset for pagination. The number of records to skip. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param int limit: Limits the number of records returned. Default is `20`. + :param str after: A resource ID to seek from. Returns records created after the specified record; results don't include the specified record. Use `after` instead of the `offset` parameter. For more information about pagination parameters, see [Pagination](https://docs.influxdata.com/influxdb/latest/api/#tag/Pagination). + :param str name: A user name. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :param str id: A user ID. Only lists the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). + :return: Users + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_users_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Users', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_users_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'offset', 'limit', 'after', 'name', 'id'] # noqa: E501 + self._check_operation_params('get_users', all_params, local_var_params) + + if 'offset' in local_var_params and local_var_params['offset'] < 0: # noqa: E501 + raise ValueError("Invalid value for parameter `offset` when calling `get_users`, must be a value greater than or equal to `0`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] > 100: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_users`, must be a value less than or equal to `100`") # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ValueError("Invalid value for parameter `limit` when calling `get_users`, must be a value greater than or equal to `1`") # noqa: E501 + path_params = {} + + query_params = [] + if 'offset' in local_var_params: + query_params.append(('offset', local_var_params['offset'])) # noqa: E501 + if 'limit' in local_var_params: + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + if 'after' in local_var_params: + query_params.append(('after', local_var_params['after'])) # noqa: E501 + if 'name' in local_var_params: + query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'id' in local_var_params: + query_params.append(('id', local_var_params['id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_users_id(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a user. + + Retrieves a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_users_id(user_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Retrieves the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_users_id_with_http_info(user_id, **kwargs) # noqa: E501 + else: + (data) = self.get_users_id_with_http_info(user_id, **kwargs) # noqa: E501 + return data + + def get_users_id_with_http_info(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a user. + + Retrieves a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_users_id_with_http_info(user_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Retrieves the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_users_id_prepare(user_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users/{userID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_users_id_async(self, user_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a user. + + Retrieves a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: A user ID. Retrieves the specified [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_users_id_prepare(user_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users/{userID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_users_id_prepare(self, user_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_users_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `get_users_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_users_id(self, user_id, user, **kwargs): # noqa: E501,D401,D403 + """Update a user. + + Updates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) and returns the user. #### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Update a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to update. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_users_id(user_id, user, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to update. (required) + :param User user: In the request body, provide the user properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_users_id_with_http_info(user_id, user, **kwargs) # noqa: E501 + else: + (data) = self.patch_users_id_with_http_info(user_id, user, **kwargs) # noqa: E501 + return data + + def patch_users_id_with_http_info(self, user_id, user, **kwargs): # noqa: E501,D401,D403 + """Update a user. + + Updates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) and returns the user. 
#### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Update a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to update. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_users_id_with_http_info(user_id, user, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to update. (required) + :param User user: In the request body, provide the user properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_users_id_prepare(user_id, user, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users/{userID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_users_id_async(self, user_id, user, **kwargs): # noqa: E501,D401,D403 + """Update a user. + + Updates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) and returns the user. #### Required permissions | Action | Permission required | |:------------|:-----------------------------------------------| | Update a user | `write-users` or `write-user USER_ID` | *`USER_ID`* is the ID of the user that you want to update. #### Related guides - [Manage users](https://docs.influxdata.com/influxdb/latest/organizations/users/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: A user ID. Specifies the [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) to update. (required) + :param User user: In the request body, provide the user properties to update. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
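+
+        Example (editorial sketch; call from within a coroutine. Assumes the
+        ``User`` model accepts a ``name`` keyword, consistent with the
+        ``POST /api/v2/users`` description below that calls ``name`` the
+        required user property):
+
+        >>> from influxdb_client.domain.user import User
+        >>> response = await api.patch_users_id_async('USER_ID', User(name='renamed-user'))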
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_users_id_prepare(user_id, user, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users/{userID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_users_id_prepare(self, user_id, user, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'user', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_users_id', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `patch_users_id`") # noqa: E501 + # verify the required parameter 'user' is set + if ('user' not in local_var_params or + local_var_params['user'] is None): + raise ValueError("Missing the required parameter `user` when calling `patch_users_id`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'user' in local_var_params: + body_params = local_var_params['user'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_users(self, user, **kwargs): # noqa: E501,D401,D403 + """Create a user. + + Creates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that can access InfluxDB. Returns the user. Use this endpoint to create a user that can sign in to start a user session through one of the following interfaces: - InfluxDB UI - `/api/v2/signin` InfluxDB API endpoint - InfluxDB CLI This endpoint represents the first two steps in a four-step process to allow a user to authenticate with a username and password, and then access data in an organization: 1. Create a user: send a `POST` request to `POST /api/v2/users`. The `name` property is required. 2. Extract the user ID (`id` property) value from the API response for _step 1_. 3. Create an authorization (and API token) for the user: send a `POST` request to [`POST /api/v2/authorizations`](#operation/PostAuthorizations), passing the user ID (`id`) from _step 2_. 4. Create a password for the user: send a `POST` request to [`POST /api/v2/users/USER_ID/password`](#operation/PostUsersIDPassword), passing the user ID from _step 2_. 
#### Required permissions | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | Create a user | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | #### Related guides - [Create a user](https://docs.influxdata.com/influxdb/latest/users/create-user/) - [Create an API token scoped to a user](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/#create-a-token-scoped-to-a-user) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_users(user, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param User user: The user to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_users_with_http_info(user, **kwargs) # noqa: E501 + else: + (data) = self.post_users_with_http_info(user, **kwargs) # noqa: E501 + return data + + def post_users_with_http_info(self, user, **kwargs): # noqa: E501,D401,D403 + """Create a user. + + Creates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that can access InfluxDB. Returns the user. Use this endpoint to create a user that can sign in to start a user session through one of the following interfaces: - InfluxDB UI - `/api/v2/signin` InfluxDB API endpoint - InfluxDB CLI This endpoint represents the first two steps in a four-step process to allow a user to authenticate with a username and password, and then access data in an organization: 1. Create a user: send a `POST` request to `POST /api/v2/users`. The `name` property is required. 2. Extract the user ID (`id` property) value from the API response for _step 1_. 3. Create an authorization (and API token) for the user: send a `POST` request to [`POST /api/v2/authorizations`](#operation/PostAuthorizations), passing the user ID (`id`) from _step 2_. 4. Create a password for the user: send a `POST` request to [`POST /api/v2/users/USER_ID/password`](#operation/PostUsersIDPassword), passing the user ID from _step 2_. #### Required permissions | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | Create a user | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | #### Related guides - [Create a user](https://docs.influxdata.com/influxdb/latest/users/create-user/) - [Create an API token scoped to a user](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/#create-a-token-scoped-to-a-user) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_users_with_http_info(user, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param User user: The user to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
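+
+ A minimal sketch of step 1 of the flow described above, shown via the
+ ``post_users`` wrapper, which returns just the response body
+ (hypothetical user name; ``api`` is the service instance). Only
+ ``name`` is required:
+
+ >>> from influxdb_client import User
+ >>> created = api.post_users(User(name="example-user"))
+ >>> created.id  # pass this ID to the authorization and password steps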
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_users_prepare(user, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_users_async(self, user, **kwargs): # noqa: E501,D401,D403 + """Create a user. + + Creates a [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user) that can access InfluxDB. Returns the user. Use this endpoint to create a user that can sign in to start a user session through one of the following interfaces: - InfluxDB UI - `/api/v2/signin` InfluxDB API endpoint - InfluxDB CLI This endpoint represents the first two steps in a four-step process to allow a user to authenticate with a username and password, and then access data in an organization: 1. Create a user: send a `POST` request to `POST /api/v2/users`. The `name` property is required. 2. Extract the user ID (`id` property) value from the API response for _step 1_. 3. Create an authorization (and API token) for the user: send a `POST` request to [`POST /api/v2/authorizations`](#operation/PostAuthorizations), passing the user ID (`id`) from _step 2_. 4. Create a password for the user: send a `POST` request to [`POST /api/v2/users/USER_ID/password`](#operation/PostUsersIDPassword), passing the user ID from _step 2_. #### Required permissions | Action | Permission required | Restriction | |:-------|:--------------------|:------------| | Create a user | _[Operator token](https://docs.influxdata.com/influxdb/latest/security/tokens/#operator-token)_ | | #### Related guides - [Create a user](https://docs.influxdata.com/influxdb/latest/users/create-user/) - [Create an API token scoped to a user](https://docs.influxdata.com/influxdb/latest/security/tokens/create-token/#create-a-token-scoped-to-a-user) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param User user: The user to create. (required) + :param str zap_trace_span: OpenTracing span context + :return: UserResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_users_prepare(user, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='UserResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_users_prepare(self, user, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_users', all_params, local_var_params) + # verify the required parameter 'user' is set + if ('user' not in local_var_params or + local_var_params['user'] is None): + raise ValueError("Missing the required parameter `user` when calling `post_users`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'user' in local_var_params: + body_params = local_var_params['user'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_users_id_password(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_users_id_password(user_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_users_id_password_with_http_info(user_id, password_reset_body, **kwargs) # noqa: E501 + else: + (data) = self.post_users_id_password_with_http_info(user_id, password_reset_body, **kwargs) # noqa: E501 + return data + + def post_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. 
Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_users_id_password_with_http_info(user_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_users_id_password_prepare(user_id, password_reset_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users/{userID}/password', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_users_id_password_async(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
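+
+ A minimal sketch (hypothetical user ID and password; InfluxDB OSS only,
+ since Cloud rejects this endpoint), run inside a coroutine:
+
+ >>> from influxdb_client import PasswordResetBody
+ >>> await api.post_users_id_password_async(
+ ...     "0000000000000001", PasswordResetBody(password="NEW_PASSWORD"))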
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_users_id_password_prepare(user_id, password_reset_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users/{userID}/password', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_users_id_password_prepare(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'password_reset_body', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_users_id_password', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `post_users_id_password`") # noqa: E501 + # verify the required parameter 'password_reset_body' is set + if ('password_reset_body' not in local_var_params or + local_var_params['password_reset_body'] is None): + raise ValueError("Missing the required parameter `password_reset_body` when calling `post_users_id_password`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'password_reset_body' in local_var_params: + body_params = local_var_params['password_reset_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_me_password(self, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates the password for the signed-in [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). This endpoint represents the third step in the following three-step process to let a user with a user session update their password: 1. Pass the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. From the response in the first step, extract the session cookie (`Set-Cookie`) header. 3. Pass the following in a request to the `PUT /api/v2/me/password` endpoint: - The `Set-Cookie` header from the second step - The `Authorization Basic` header with the user's _Basic authentication_ credentials - `{"password": "NEW_PASSWORD"}` in the request body #### InfluxDB Cloud - Doesn't let you manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update your password. 
#### Related endpoints - [Signin](#tag/Signin) - [Signout](#tag/Signout) - [Users](#tag/Users) #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_me_password(password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PasswordResetBody password_reset_body: The new password. (required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_me_password_with_http_info(password_reset_body, **kwargs) # noqa: E501 + else: + (data) = self.put_me_password_with_http_info(password_reset_body, **kwargs) # noqa: E501 + return data + + def put_me_password_with_http_info(self, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates the password for the signed-in [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). This endpoint represents the third step in the following three-step process to let a user with a user session update their password: 1. Pass the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. From the response in the first step, extract the session cookie (`Set-Cookie`) header. 3. Pass the following in a request to the `PUT /api/v2/me/password` endpoint: - The `Set-Cookie` header from the second step - The `Authorization Basic` header with the user's _Basic authentication_ credentials - `{"password": "NEW_PASSWORD"}` in the request body #### InfluxDB Cloud - Doesn't let you manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update your password. #### Related endpoints - [Signin](#tag/Signin) - [Signout](#tag/Signout) - [Users](#tag/Users) #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_me_password_with_http_info(password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param PasswordResetBody password_reset_body: The new password. (required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. 
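+
+ A minimal sketch of step 3 above, via the ``put_me_password`` wrapper
+ (illustrative credentials; the session cookie from
+ ``POST /api/v2/signin`` is assumed to be carried by the underlying
+ ``api_client``):
+
+ >>> import base64
+ >>> basic = base64.b64encode(b"my-user:OLD_PASSWORD").decode()
+ >>> from influxdb_client import PasswordResetBody
+ >>> api.put_me_password(PasswordResetBody(password="NEW_PASSWORD"),
+ ...                     authorization="Basic " + basic)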
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_me_password_prepare(password_reset_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/me/password', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_me_password_async(self, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates the password for the signed-in [user](https://docs.influxdata.com/influxdb/latest/reference/glossary/#user). This endpoint represents the third step in the following three-step process to let a user with a user session update their password: 1. Pass the user's [Basic authentication credentials](#section/Authentication/BasicAuthentication) to the `POST /api/v2/signin` endpoint to create a user session and generate a session cookie. 2. From the response in the first step, extract the session cookie (`Set-Cookie`) header. 3. Pass the following in a request to the `PUT /api/v2/me/password` endpoint: - The `Set-Cookie` header from the second step - The `Authorization Basic` header with the user's _Basic authentication_ credentials - `{"password": "NEW_PASSWORD"}` in the request body #### InfluxDB Cloud - Doesn't let you manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update your password. #### Related endpoints - [Signin](#tag/Signin) - [Signout](#tag/Signout) - [Users](#tag/Users) #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param PasswordResetBody password_reset_body: The new password. (required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_me_password_prepare(password_reset_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/me/password', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_me_password_prepare(self, password_reset_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['password_reset_body', 'zap_trace_span', 'authorization'] # noqa: E501 + self._check_operation_params('put_me_password', all_params, local_var_params) + # verify the required parameter 'password_reset_body' is set + if ('password_reset_body' not in local_var_params or + local_var_params['password_reset_body'] is None): + raise ValueError("Missing the required parameter `password_reset_body` when calling `put_me_password`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'authorization' in local_var_params: + header_params['Authorization'] = local_var_params['authorization'] # noqa: E501 + + body_params = None + if 'password_reset_body' in local_var_params: + body_params = local_var_params['password_reset_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_users_id_password(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. Use this endpoint to let a user authenticate with [Basic authentication credentials](#section/Authentication/BasicAuthentication) and set a new password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_users_id_password(user_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. (required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_users_id_password_with_http_info(user_id, password_reset_body, **kwargs) # noqa: E501 + else: + (data) = self.put_users_id_password_with_http_info(user_id, password_reset_body, **kwargs) # noqa: E501 + return data + + def put_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. Use this endpoint to let a user authenticate with [Basic authentication credentials](#section/Authentication/BasicAuthentication) and set a new password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_users_id_password_with_http_info(user_id, password_reset_body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. (required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_users_id_password_prepare(user_id, password_reset_body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/users/{userID}/password', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_users_id_password_async(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + """Update a password. + + Updates a user password. Use this endpoint to let a user authenticate with [Basic authentication credentials](#section/Authentication/BasicAuthentication) and set a new password. #### InfluxDB Cloud - Doesn't allow you to manage user passwords through the API. Use the InfluxDB Cloud user interface (UI) to update a password. #### Related guides - [InfluxDB Cloud - Change your password](https://docs.influxdata.com/influxdb/cloud/account-management/change-password/) - [InfluxDB OSS - Change your password](https://docs.influxdata.com/influxdb/latest/users/change-password/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str user_id: The ID of the user to set the password for. (required) + :param PasswordResetBody password_reset_body: The new password to set for the user. 
(required) + :param str zap_trace_span: OpenTracing span context + :param str authorization: An auth credential for the Basic scheme + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_users_id_password_prepare(user_id, password_reset_body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/users/{userID}/password', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=['BasicAuthentication'], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_users_id_password_prepare(self, user_id, password_reset_body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['user_id', 'password_reset_body', 'zap_trace_span', 'authorization'] # noqa: E501 + self._check_operation_params('put_users_id_password', all_params, local_var_params) + # verify the required parameter 'user_id' is set + if ('user_id' not in local_var_params or + local_var_params['user_id'] is None): + raise ValueError("Missing the required parameter `user_id` when calling `put_users_id_password`") # noqa: E501 + # verify the required parameter 'password_reset_body' is set + if ('password_reset_body' not in local_var_params or + local_var_params['password_reset_body'] is None): + raise ValueError("Missing the required parameter `password_reset_body` when calling `put_users_id_password`") # noqa: E501 + + path_params = {} + if 'user_id' in local_var_params: + path_params['userID'] = local_var_params['user_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'authorization' in local_var_params: + header_params['Authorization'] = local_var_params['authorization'] # noqa: E501 + + body_params = None + if 'password_reset_body' in local_var_params: + body_params = local_var_params['password_reset_body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/variables_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/variables_service.py new file mode 100644 index 0000000..143bd7c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/variables_service.py @@ -0,0 +1,1127 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501
+
+OpenAPI spec version: 2.0.0
+Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import re # noqa: F401
+
+from influxdb_client.service._base_service import _BaseService
+
+
+class VariablesService(_BaseService):
+ """NOTE: This class is auto generated by OpenAPI Generator.
+
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None): # noqa: E501,D401,D403
+ """VariablesService - an operation defined in OpenAPI."""
+ super().__init__(api_client)
+
+ def delete_variables_id(self, variable_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a variable.
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.delete_variables_id(variable_id, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str variable_id: The variable ID. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async_req'):
+ return self.delete_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
+ return data
+
+ def delete_variables_id_with_http_info(self, variable_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a variable.
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+ >>> thread = api.delete_variables_id_with_http_info(variable_id, async_req=True)
+ >>> result = thread.get()
+
+ :param async_req bool
+ :param str variable_id: The variable ID. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
+ """ # noqa: E501
+ local_var_params, path_params, query_params, header_params, body_params = \
+ self._delete_variables_id_prepare(variable_id, **kwargs) # noqa: E501
+
+ return self.api_client.call_api(
+ '/api/v2/variables/{variableID}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=[],
+ files={},
+ response_type=None, # noqa: E501
+ auth_settings=[],
+ async_req=local_var_params.get('async_req'),
+ _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
+ _preload_content=local_var_params.get('_preload_content', True),
+ _request_timeout=local_var_params.get('_request_timeout'),
+ collection_formats={},
+ urlopen_kw=kwargs.get('urlopen_kw', None))
+
+ async def delete_variables_id_async(self, variable_id, **kwargs): # noqa: E501,D401,D403
+ """Delete a variable.
+
+ This method makes an asynchronous HTTP request.
+
+ :param async_req bool
+ :param str variable_id: The variable ID. (required)
+ :param str zap_trace_span: OpenTracing span context
+ :return: None
+ If the method is called asynchronously,
+ returns the request thread.
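+
+ A minimal sketch (hypothetical variable ID; ``api`` is the service
+ instance, as in the examples above), run inside a coroutine:
+
+ >>> await api.delete_variables_id_async("05aeb0ad75aca000")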
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_variables_id_prepare(variable_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_variables_id_prepare(self, variable_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_variables_id', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `delete_variables_id`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def delete_variables_id_labels_id(self, variable_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_variables_id_labels_id(variable_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str label_id: The label ID to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.delete_variables_id_labels_id_with_http_info(variable_id, label_id, **kwargs) # noqa: E501 + else: + (data) = self.delete_variables_id_labels_id_with_http_info(variable_id, label_id, **kwargs) # noqa: E501 + return data + + def delete_variables_id_labels_id_with_http_info(self, variable_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_variables_id_labels_id_with_http_info(variable_id, label_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str label_id: The label ID to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_variables_id_labels_id_prepare(variable_id, label_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def delete_variables_id_labels_id_async(self, variable_id, label_id, **kwargs): # noqa: E501,D401,D403 + """Delete a label from a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str label_id: The label ID to delete. (required) + :param str zap_trace_span: OpenTracing span context + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._delete_variables_id_labels_id_prepare(variable_id, label_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}/labels/{labelID}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _delete_variables_id_labels_id_prepare(self, variable_id, label_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'label_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('delete_variables_id_labels_id', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `delete_variables_id_labels_id`") # noqa: E501 + # verify the required parameter 'label_id' is set + if ('label_id' not in local_var_params or + local_var_params['label_id'] is None): + raise ValueError("Missing the required parameter `label_id` when calling `delete_variables_id_labels_id`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + if 'label_id' in local_var_params: + path_params['labelID'] = local_var_params['label_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_variables(self, 
**kwargs): # noqa: E501,D401,D403 + """List all variables. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :param str org_id: The organization ID. + :return: Variables + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_variables_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_variables_with_http_info(**kwargs) # noqa: E501 + return data + + def get_variables_with_http_info(self, **kwargs): # noqa: E501,D401,D403 + """List all variables. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :param str org_id: The organization ID. + :return: Variables + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_prepare(**kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variables', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_variables_async(self, **kwargs): # noqa: E501,D401,D403 + """List all variables. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str zap_trace_span: OpenTracing span context + :param str org: The name of the organization. + :param str org_id: The organization ID. + :return: Variables + If the method is called asynchronously, + returns the request thread. 
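+
+ A minimal sketch (hypothetical organization name), run inside a
+ coroutine:
+
+ >>> variables = await api.get_variables_async(org="my-org")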
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_prepare(**kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variables', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_variables_prepare(self, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['zap_trace_span', 'org', 'org_id'] # noqa: E501 + self._check_operation_params('get_variables', all_params, local_var_params) + + path_params = {} + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_variables_id(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables_id(variable_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501 + else: + (data) = self.get_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501 + return data + + def get_variables_id_with_http_info(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables_id_with_http_info(variable_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_id_prepare(variable_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_variables_id_async(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_id_prepare(variable_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_variables_id_prepare(self, variable_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_variables_id', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `get_variables_id`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def get_variables_id_labels(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables_id_labels(variable_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_variables_id_labels_with_http_info(variable_id, **kwargs) # noqa: E501 + else: + (data) = self.get_variables_id_labels_with_http_info(variable_id, **kwargs) # noqa: E501 + return data + + def get_variables_id_labels_with_http_info(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_variables_id_labels_with_http_info(variable_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_id_labels_prepare(variable_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_variables_id_labels_async(self, variable_id, **kwargs): # noqa: E501,D401,D403 + """List all labels for a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelsResponse + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_variables_id_labels_prepare(variable_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}/labels', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelsResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_variables_id_labels_prepare(self, variable_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_variables_id_labels', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `get_variables_id_labels`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_variables_id(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Update a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_variables_id(variable_id, variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501 + else: + (data) = self.patch_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501 + return data + + def patch_variables_id_with_http_info(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Update a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_variables_id_with_http_info(variable_id, variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_variables_id_prepare(variable_id, variable, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_variables_id_async(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Update a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable update to apply (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_variables_id_prepare(variable_id, variable, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_variables_id_prepare(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'variable', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_variables_id', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `patch_variables_id`") # noqa: E501 + # verify the required parameter 'variable' is set + if ('variable' not in local_var_params or + local_var_params['variable'] is None): + raise ValueError("Missing the required parameter `variable` when calling `patch_variables_id`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'variable' in local_var_params: + body_params = local_var_params['variable'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params 
+ + def post_variables(self, variable, **kwargs): # noqa: E501,D401,D403 + """Create a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_variables(variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param Variable variable: Variable to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_variables_with_http_info(variable, **kwargs) # noqa: E501 + else: + (data) = self.post_variables_with_http_info(variable, **kwargs) # noqa: E501 + return data + + def post_variables_with_http_info(self, variable, **kwargs): # noqa: E501,D401,D403 + """Create a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_variables_with_http_info(variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param Variable variable: Variable to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_variables_prepare(variable, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_variables_async(self, variable, **kwargs): # noqa: E501,D401,D403 + """Create a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param Variable variable: Variable to create (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_variables_prepare(variable, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_variables_prepare(self, variable, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_variables', all_params, local_var_params) + # verify the required parameter 'variable' is set + if ('variable' not in local_var_params or + local_var_params['variable'] is None): + raise ValueError("Missing the required parameter `variable` when calling `post_variables`") # noqa: E501 + + path_params = {} + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'variable' in local_var_params: + body_params = local_var_params['variable'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def post_variables_id_labels(self, variable_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_variables_id_labels(variable_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_variables_id_labels_with_http_info(variable_id, label_mapping, **kwargs) # noqa: E501 + else: + (data) = self.post_variables_id_labels_with_http_info(variable_id, label_mapping, **kwargs) # noqa: E501 + return data + + def post_variables_id_labels_with_http_info(self, variable_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_variables_id_labels_with_http_info(variable_id, label_mapping, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. 
(required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_variables_id_labels_prepare(variable_id, label_mapping, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_variables_id_labels_async(self, variable_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + """Add a label to a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param LabelMapping label_mapping: Label to add (required) + :param str zap_trace_span: OpenTracing span context + :return: LabelResponse + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_variables_id_labels_prepare(variable_id, label_mapping, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}/labels', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='LabelResponse', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_variables_id_labels_prepare(self, variable_id, label_mapping, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'label_mapping', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('post_variables_id_labels', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `post_variables_id_labels`") # noqa: E501 + # verify the required parameter 'label_mapping' is set + if ('label_mapping' not in local_var_params or + local_var_params['label_mapping'] is None): + raise ValueError("Missing the required parameter `label_mapping` when calling `post_variables_id_labels`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'label_mapping' in local_var_params: + body_params = local_var_params['label_mapping'] + # HTTP header 
`Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def put_variables_id(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Replace a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_variables_id(variable_id, variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable to replace (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.put_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501 + else: + (data) = self.put_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501 + return data + + def put_variables_id_with_http_info(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Replace a variable. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.put_variables_id_with_http_info(variable_id, variable, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable to replace (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_variables_id_prepare(variable_id, variable, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/variables/{variableID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def put_variables_id_async(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + """Replace a variable. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str variable_id: The variable ID. (required) + :param Variable variable: Variable to replace (required) + :param str zap_trace_span: OpenTracing span context + :return: Variable + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._put_variables_id_prepare(variable_id, variable, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/variables/{variableID}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='Variable', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _put_variables_id_prepare(self, variable_id, variable, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['variable_id', 'variable', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('put_variables_id', all_params, local_var_params) + # verify the required parameter 'variable_id' is set + if ('variable_id' not in local_var_params or + local_var_params['variable_id'] is None): + raise ValueError("Missing the required parameter `variable_id` when calling `put_variables_id`") # noqa: E501 + # verify the required parameter 'variable' is set + if ('variable' not in local_var_params or + local_var_params['variable'] is None): + raise ValueError("Missing the required parameter `variable` when calling `put_variables_id`") # noqa: E501 + + path_params = {} + if 'variable_id' in local_var_params: + path_params['variableID'] = local_var_params['variable_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'variable' in local_var_params: + body_params = local_var_params['variable'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/views_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/views_service.py new file mode 100644 index 0000000..203f6c6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/views_service.py @@ -0,0 +1,293 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class ViewsService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """ViewsService - a operation defined in OpenAPI.""" + super().__init__(api_client) + + def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. 
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + else: + (data) = self.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, **kwargs) # noqa: E501 + return data + + def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + """Retrieve the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The dashboard ID. (required) + :param str cell_id: The cell ID. (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._get_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _get_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('get_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `get_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params + + def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + else: + (data) = self.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + return data + + def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. 
+ + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.patch_dashboards_id_cells_id_view_with_http_info(dashboard_id, cell_id, view, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + """Update the view for a cell. + + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str dashboard_id: The ID of the dashboard to update. (required) + :param str cell_id: The ID of the cell to update. (required) + :param View view: (required) + :param str zap_trace_span: OpenTracing span context + :return: View + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._patch_dashboards_id_cells_id_view_prepare(dashboard_id, cell_id, view, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/dashboards/{dashboardID}/cells/{cellID}/view', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type='View', # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _patch_dashboards_id_cells_id_view_prepare(self, dashboard_id, cell_id, view, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['dashboard_id', 'cell_id', 'view', 'zap_trace_span'] # noqa: E501 + self._check_operation_params('patch_dashboards_id_cells_id_view', all_params, local_var_params) + # verify the required parameter 'dashboard_id' is set + if ('dashboard_id' not in local_var_params or + local_var_params['dashboard_id'] is None): + raise ValueError("Missing the required parameter `dashboard_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'cell_id' is set + if ('cell_id' not in local_var_params or + local_var_params['cell_id'] is None): + raise ValueError("Missing the required parameter `cell_id` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + # verify the required parameter 'view' is set + if ('view' not in local_var_params or + local_var_params['view'] is None): + raise ValueError("Missing the required parameter `view` when calling `patch_dashboards_id_cells_id_view`") # noqa: E501 + + path_params = {} + if 'dashboard_id' in local_var_params: + path_params['dashboardID'] = local_var_params['dashboard_id'] # noqa: E501 + if 'cell_id' in local_var_params: + path_params['cellID'] = local_var_params['cell_id'] # noqa: E501 + + query_params = [] + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + + body_params = None + if 'view' in local_var_params: + body_params = local_var_params['view'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/service/write_service.py b/myenv/lib/python3.12/site-packages/influxdb_client/service/write_service.py new file mode 100644 index 0000000..bcd970d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/influxdb_client/service/write_service.py @@ -0,0 +1,201 @@ +# coding: utf-8 + +""" +InfluxDB OSS API Service. + +The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 + +OpenAPI spec version: 2.0.0 +Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +from influxdb_client.service._base_service import _BaseService + + +class WriteService(_BaseService): + """NOTE: This class is auto generated by OpenAPI Generator. + + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): # noqa: E501,D401,D403 + """WriteService - an operation defined in OpenAPI.""" + super().__init__(api_client) + + def post_write(self, org, bucket, body, **kwargs): # noqa: E501,D401,D403 + """Write data. + + Writes data to a bucket. Use this endpoint to send data in [line protocol](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/) format to InfluxDB. #### InfluxDB Cloud - Does the following when you send a write request: 1. Validates the request and queues the write. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the write asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request and handles the write synchronously. - If all points were written successfully, responds with HTTP `2xx` status code; otherwise, returns the first line that failed. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Write data with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/api) - [Optimize writes to InfluxDB](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_write(org, bucket, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. (required) + :param str bucket: A bucket name or ID. InfluxDB writes all points in the batch to the specified bucket. (required) + :param str body: In the request body, provide data in [line protocol format](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/). To send compressed data, do the following: 1. Use [GZIP](https://www.gzip.org/) to compress the line protocol data. 2. In your request, send the compressed data and the `Content-Encoding: gzip` header.
#### Related guides - [Best practices for optimizing writes](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The compression applied to the line protocol in the request payload. To send a GZIP payload, pass `Content-Encoding: gzip` header. + :param str content_type: The format of the data in the request body. To send a line protocol payload, pass `Content-Type: text/plain; charset=utf-8`. + :param int content_length: The size of the entity-body, in bytes, sent to InfluxDB. If the length is greater than the `max body` configuration option, the server responds with status code `413`. + :param str accept: The content type that the client can understand. Writes only return a response body if they fail--for example, due to a formatting problem or quota limit. #### InfluxDB Cloud - Returns only `application/json` for format and limit errors. - Returns only `text/html` for some quota limit errors. #### InfluxDB OSS - Returns only `application/json` for format and limit errors. #### Related guides - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. + :param WritePrecision precision: The precision for unix timestamps in the line protocol batch. + :return: None + If the method is called asynchronously, + returns the request thread. + """ # noqa: E501 + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.post_write_with_http_info(org, bucket, body, **kwargs) # noqa: E501 + else: + (data) = self.post_write_with_http_info(org, bucket, body, **kwargs) # noqa: E501 + return data + + def post_write_with_http_info(self, org, bucket, body, **kwargs): # noqa: E501,D401,D403 + """Write data. + + Writes data to a bucket. Use this endpoint to send data in [line protocol](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/) format to InfluxDB. #### InfluxDB Cloud - Does the following when you send a write request: 1. Validates the request and queues the write. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the write asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request and handles the write synchronously. - If all points were written successfully, responds with HTTP `2xx` status code; otherwise, returns the first line that failed. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/).
#### Related guides - [Write data with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/api) - [Optimize writes to InfluxDB](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.post_write_with_http_info(org, bucket, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. (required) + :param str bucket: A bucket name or ID. InfluxDB writes all points in the batch to the specified bucket. (required) + :param str body: In the request body, provide data in [line protocol format](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/). To send compressed data, do the following: 1. Use [GZIP](https://www.gzip.org/) to compress the line protocol data. 2. In your request, send the compressed data and the `Content-Encoding: gzip` header. #### Related guides - [Best practices for optimizing writes](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The compression applied to the line protocol in the request payload. To send a GZIP payload, pass `Content-Encoding: gzip` header. + :param str content_type: The format of the data in the request body. To send a line protocol payload, pass `Content-Type: text/plain; charset=utf-8`. + :param int content_length: The size of the entity-body, in bytes, sent to InfluxDB. If the length is greater than the `max body` configuration option, the server responds with status code `413`. + :param str accept: The content type that the client can understand. Writes only return a response body if they fail--for example, due to a formatting problem or quota limit. #### InfluxDB Cloud - Returns only `application/json` for format and limit errors. - Returns only `text/html` for some quota limit errors. #### InfluxDB OSS - Returns only `application/json` for format and limit errors. #### Related guides - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. + :param WritePrecision precision: The precision for unix timestamps in the line protocol batch. + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_write_prepare(org, bucket, body, **kwargs) # noqa: E501 + + return self.api_client.call_api( + '/api/v2/write', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + async def post_write_async(self, org, bucket, body, **kwargs): # noqa: E501,D401,D403 + """Write data. + + Writes data to a bucket. Use this endpoint to send data in [line protocol](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/) format to InfluxDB. #### InfluxDB Cloud - Does the following when you send a write request: 1. Validates the request and queues the write. 2. If queued, responds with _success_ (HTTP `2xx` status code); _error_ otherwise. 3. Handles the delete asynchronously and reaches eventual consistency. To ensure that InfluxDB Cloud handles writes and deletes in the order you request them, wait for a success response (HTTP `2xx` status code) before you send the next request. Because writes and deletes are asynchronous, your change might not yet be readable when you receive the response. #### InfluxDB OSS - Validates the request and handles the write synchronously. - If all points were written successfully, responds with HTTP `2xx` status code; otherwise, returns the first line that failed. #### Required permissions - `write-buckets` or `write-bucket BUCKET_ID`. *`BUCKET_ID`* is the ID of the destination bucket. #### Rate limits (with InfluxDB Cloud) `write` rate limits apply. For more information, see [limits and adjustable quotas](https://docs.influxdata.com/influxdb/cloud/account-management/limits/). #### Related guides - [Write data with the InfluxDB API](https://docs.influxdata.com/influxdb/latest/write-data/developer-tools/api) - [Optimize writes to InfluxDB](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + This method makes an asynchronous HTTP request. + + :param async_req bool + :param str org: An organization name or ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. (required) + :param str bucket: A bucket name or ID. InfluxDB writes all points in the batch to the specified bucket. (required) + :param str body: In the request body, provide data in [line protocol format](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/). To send compressed data, do the following: 1. Use [GZIP](https://www.gzip.org/) to compress the line protocol data. 2. In your request, send the compressed data and the `Content-Encoding: gzip` header. 
#### Related guides - [Best practices for optimizing writes](https://docs.influxdata.com/influxdb/latest/write-data/best-practices/optimize-writes/) (required) + :param str zap_trace_span: OpenTracing span context + :param str content_encoding: The compression applied to the line protocol in the request payload. To send a GZIP payload, pass `Content-Encoding: gzip` header. + :param str content_type: The format of the data in the request body. To send a line protocol payload, pass `Content-Type: text/plain; charset=utf-8`. + :param int content_length: The size of the entity-body, in bytes, sent to InfluxDB. If the length is greater than the `max body` configuration option, the server responds with status code `413`. + :param str accept: The content type that the client can understand. Writes only return a response body if they fail--for example, due to a formatting problem or quota limit. #### InfluxDB Cloud - Returns only `application/json` for format and limit errors. - Returns only `text/html` for some quota limit errors. #### InfluxDB OSS - Returns only `application/json` for format and limit errors. #### Related guides - [Troubleshoot issues writing data](https://docs.influxdata.com/influxdb/latest/write-data/troubleshoot/) + :param str org_id: An organization ID. #### InfluxDB Cloud - Doesn't use the `org` parameter or `orgID` parameter. - Writes data to the bucket in the organization associated with the authorization (API token). #### InfluxDB OSS - Requires either the `org` parameter or the `orgID` parameter. - If you pass both `orgID` and `org`, they must both be valid. - Writes data to the bucket in the specified organization. + :param WritePrecision precision: The precision for unix timestamps in the line protocol batch. + :return: None + If the method is called asynchronously, + returns the request thread. 
+ """ # noqa: E501 + local_var_params, path_params, query_params, header_params, body_params = \ + self._post_write_prepare(org, bucket, body, **kwargs) # noqa: E501 + + return await self.api_client.call_api( + '/api/v2/write', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=[], + files={}, + response_type=None, # noqa: E501 + auth_settings=[], + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats={}, + urlopen_kw=kwargs.get('urlopen_kw', None)) + + def _post_write_prepare(self, org, bucket, body, **kwargs): # noqa: E501,D401,D403 + local_var_params = locals() + + all_params = ['org', 'bucket', 'body', 'zap_trace_span', 'content_encoding', 'content_type', 'content_length', 'accept', 'org_id', 'precision'] # noqa: E501 + self._check_operation_params('post_write', all_params, local_var_params) + # verify the required parameter 'org' is set + if ('org' not in local_var_params or + local_var_params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `post_write`") # noqa: E501 + # verify the required parameter 'bucket' is set + if ('bucket' not in local_var_params or + local_var_params['bucket'] is None): + raise ValueError("Missing the required parameter `bucket` when calling `post_write`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in local_var_params or + local_var_params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `post_write`") # noqa: E501 + + path_params = {} + + query_params = [] + if 'org' in local_var_params: + query_params.append(('org', local_var_params['org'])) # noqa: E501 + if 'org_id' in local_var_params: + query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501 + if 'bucket' in local_var_params: + query_params.append(('bucket', local_var_params['bucket'])) # noqa: E501 + if 'precision' in local_var_params: + query_params.append(('precision', local_var_params['precision'])) # noqa: E501 + + header_params = {} + if 'zap_trace_span' in local_var_params: + header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501 + if 'content_encoding' in local_var_params: + header_params['Content-Encoding'] = local_var_params['content_encoding'] # noqa: E501 + if 'content_type' in local_var_params: + header_params['Content-Type'] = local_var_params['content_type'] # noqa: E501 + if 'content_length' in local_var_params: + header_params['Content-Length'] = local_var_params['content_length'] # noqa: E501 + if 'accept' in local_var_params: + header_params['Accept'] = local_var_params['accept'] # noqa: E501 + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json', 'text/html', ]) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['text/plain']) # noqa: E501 + + return local_var_params, path_params, query_params, header_params, body_params diff --git a/myenv/lib/python3.12/site-packages/influxdb_client/version.py b/myenv/lib/python3.12/site-packages/influxdb_client/version.py new file mode 100644 index 0000000..413edc0 --- /dev/null +++ 
b/myenv/lib/python3.12/site-packages/influxdb_client/version.py @@ -0,0 +1,3 @@ +"""Version of the Client that is used in User-Agent header.""" + +VERSION = '1.46.0' diff --git a/myenv/lib/python3.12/site-packages/openhab/__init__.py b/myenv/lib/python3.12/site-packages/openhab/__init__.py new file mode 100644 index 0000000..dc26785 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/__init__.py @@ -0,0 +1,5 @@ +"""Module entry point.""" + +from .client import OpenHAB + +__all__ = ['OpenHAB'] diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..fde8839 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/client.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..c75d4e6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/client.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/command_types.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/command_types.cpython-312.pyc new file mode 100644 index 0000000..2b25989 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/command_types.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/config.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..97b1fe2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/config.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..e97040e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/items.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/items.cpython-312.pyc new file mode 100644 index 0000000..2e8d39e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/items.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/oauth2_helper.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/oauth2_helper.cpython-312.pyc new file mode 100644 index 0000000..710cdf1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/oauth2_helper.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/__pycache__/rules.cpython-312.pyc b/myenv/lib/python3.12/site-packages/openhab/__pycache__/rules.cpython-312.pyc new file mode 100644 index 0000000..b8b5730 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/openhab/__pycache__/rules.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/openhab/client.py b/myenv/lib/python3.12/site-packages/openhab/client.py new file mode 100644 index 0000000..1a59c7a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/client.py @@ -0,0 +1,466 @@ +"""python library for accessing the openHAB REST API.""" + +# +# Georges Toth (c) 2016-present +# +# 
python-openhab is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# python-openhab is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with python-openhab. If not, see <http://www.gnu.org/licenses/>. +# + +import datetime +import logging +import typing + +import authlib.integrations.httpx_client +import httpx + +import openhab.items +import openhab.rules + +from .config import Oauth2Config, Oauth2Token + +__author__ = 'Georges Toth ' +__license__ = 'AGPLv3+' + + +class OpenHAB: + """openHAB REST API client.""" + + def __init__(self, base_url: str, + username: typing.Optional[str] = None, + password: typing.Optional[str] = None, + http_auth: typing.Optional[httpx.Auth] = None, + timeout: typing.Optional[float] = None, + oauth2_config: typing.Optional[typing.Dict[str, typing.Any]] = None, + ) -> None: + """Class constructor. + + The format of the optional *oauth2_config* dictionary is as follows: + ```python + {"client_id": "http://127.0.0.1/auth", + "token_cache": "/path/to/.oauth2_token", + "token": + {"access_token": "adsafdasfasfsafasfsafasfasfasfsa....", + "expires_in": 3600, + "refresh_token": "312e21e21e32112", + "scope": "admin", + "token_type": "bearer", + "user": { + "name": "admin", + "roles": [ + "administrator" + ] + } + } + } + ``` + + Args: + base_url (str): The openHAB REST URL, e.g. http://example.com/rest + username (str, optional): An optional username, used in conjunction with an optionally + provided password, in case openHAB requires authentication. + password (str, optional): An optional password, used in conjunction with an optionally + provided username, in case openHAB requires authentication. + http_auth (Auth, optional): As an alternative to a username/password pair, specify a + custom HTTP authentication object of type :class:`httpx.Auth`. + timeout (float, optional): An optional timeout for REST transactions. + oauth2_config: Optional OAuth2 configuration dictionary. + + Returns: + OpenHAB: openHAB class instance.
+ """ + self.url_rest = base_url + self.url_base = base_url.rsplit('/', 1)[0] + + self.oauth2_config: typing.Optional[Oauth2Config] = None + + if oauth2_config is not None: + self.oauth2_config = Oauth2Config(**oauth2_config) + + self.session = authlib.integrations.httpx_client.OAuth2Client(client_id=self.oauth2_config.client_id, + token=self.oauth2_config.token.model_dump(), + update_token=self._oauth2_token_updater, + ) + + self.session.metadata['token_endpoint'] = f'{self.url_rest}/auth/token' + + if not self.oauth2_config.token_cache.is_file(): + self._oauth2_token_updater(self.oauth2_config.token.model_dump()) + + else: + self.session = httpx.Client(timeout=timeout) + + if http_auth is not None: + self.session.auth = http_auth + elif not (username is None or password is None): + self.session.auth = httpx.BasicAuth(username, password) + + self.logger = logging.getLogger(__name__) + + self._rules: typing.Optional[openhab.rules.Rules] = None + + @property + def rules(self) -> openhab.rules.Rules: + """Get object for managing rules.""" + if self._rules is None: + self._rules = openhab.rules.Rules(self) + + return self._rules + + @staticmethod + def _check_req_return(req: httpx.Response) -> None: + """Internal method for checking the return value of a REST HTTP request. + + Args: + req (requests.Response): A requests Response object. + + Returns: + None: Returns None if no error occurred; else raises an exception. + + Raises: + ValueError: Raises a ValueError exception in case of a non-successful + REST request. + """ + if not 200 <= req.status_code < 300: + req.raise_for_status() + + def req_get(self, uri_path: str, params: typing.Optional[typing.Union[typing.Dict[str, typing.Any], list, tuple]] = None) -> typing.Any: + """Helper method for initiating a HTTP GET request. + + Besides doing the actual request, it also checks the return value and returns the resulting decoded + JSON data. + + Args: + uri_path (str): The path to be used in the GET request. + + Returns: + dict: Returns a dict containing the data returned by the OpenHAB REST server. + """ + r = self.session.get(f'{self.url_rest}{uri_path}', params=params) + self._check_req_return(r) + return r.json() + + def req_post(self, + uri_path: str, + data: typing.Optional[typing.Union[str, bytes, typing.Mapping[str, typing.Any], typing.Iterable[ + typing.Tuple[str, typing.Optional[str]]]]] = None, + ) -> None: + """Helper method for initiating a HTTP POST request. + + Besides doing the actual request, it also checks the return value and returns the resulting decoded + JSON data. + + Args: + uri_path (str): The path to be used in the POST request. + data (dict, optional): A optional dict with data to be submitted as part of the POST request. + + Returns: + None: No data is returned. + """ + headers = self.session.headers + headers['Content-Type'] = 'text/plain' + + r = self.session.post(self.url_rest + uri_path, content=data, headers=headers) + self._check_req_return(r) + + def req_put(self, + uri_path: str, + data: typing.Optional[dict] = None, + json_data: typing.Optional[dict] = None, + headers: typing.Optional[dict] = None, + ) -> None: + """Helper method for initiating a HTTP PUT request. + + Besides doing the actual request, it also checks the return value and returns the resulting decoded + JSON data. + + Args: + uri_path (str): The path to be used in the PUT request. + data (dict, optional): A optional dict with data to be submitted as part of the PUT request. + json_data: Data to be submitted as json. 
+      headers: Specify optional custom headers.
+
+    Returns:
+      None: No data is returned.
+    """
+    if headers is None:
+      headers = {'Content-Type': 'text/plain'}
+      content = data
+      data = None
+    else:
+      content = None
+
+    r = self.session.put(self.url_rest + uri_path, content=content, data=data, json=json_data, headers=headers)
+    self._check_req_return(r)
+
+  # fetch all items
+  def fetch_all_items(self) -> typing.Dict[str, openhab.items.Item]:
+    """Returns all items defined in openHAB.
+
+    Returns:
+      dict: Returns a dict with item names as key and item class instances as value.
+    """
+    items = {}  # type: dict
+    res = self.req_get('/items/')
+
+    for i in res:
+      if i['name'] not in items:
+        items[i['name']] = self.json_to_item(i)
+
+    return items
+
+  def get_item(self, name: str) -> openhab.items.Item:
+    """Returns an item with its state and type as fetched from openHAB.
+
+    Args:
+      name (str): The name of the item to fetch from openHAB.
+
+    Returns:
+      Item: A corresponding Item class instance with the state of the requested item.
+    """
+    json_data = self.get_item_raw(name)
+
+    return self.json_to_item(json_data)
+
+  def json_to_item(self, json_data: dict) -> openhab.items.Item:
+    """This method takes as argument the RAW (JSON decoded) response for an openHAB item.
+
+    It checks what type the item is and returns a class instance of the
+    specific item filled with the item's state.
+
+    Args:
+      json_data (dict): The JSON decoded data as returned by the openHAB server.
+
+    Returns:
+      Item: A corresponding Item class instance with the state of the item.
+    """
+    _type = json_data['type']
+
+    if _type == 'Group' and 'groupType' in json_data:
+      _type = json_data['groupType']
+
+    if _type == 'Group' and 'groupType' not in json_data:
+      return openhab.items.GroupItem(self, json_data)
+
+    if _type == 'String':
+      return openhab.items.StringItem(self, json_data)
+
+    if _type == 'Switch':
+      return openhab.items.SwitchItem(self, json_data)
+
+    if _type == 'DateTime':
+      return openhab.items.DateTimeItem(self, json_data)
+
+    if _type == 'Contact':
+      return openhab.items.ContactItem(self, json_data)
+
+    if _type.startswith('Number'):
+      return openhab.items.NumberItem(self, json_data)
+
+    if _type == 'Dimmer':
+      return openhab.items.DimmerItem(self, json_data)
+
+    if _type == 'Color':
+      return openhab.items.ColorItem(self, json_data)
+
+    if _type == 'Rollershutter':
+      return openhab.items.RollershutterItem(self, json_data)
+
+    if _type == 'Player':
+      return openhab.items.PlayerItem(self, json_data)
+
+    return openhab.items.Item(self, json_data)
+
+  def get_item_raw(self, name: str) -> typing.Any:
+    """Method for fetching the raw JSON configuration of an item.
+
+    Args:
+      name (str): The item name to be fetched.
+
+    Returns:
+      dict: A JSON decoded dict.
+    """
+    return self.req_get(f'/items/{name}')
+
+  def logout(self) -> bool:
+    """OAuth2 session logout method.
+
+    Returns:
+      True if the logout succeeded, False otherwise.
+    """
+    if self.oauth2_config is None or not isinstance(self.session, authlib.integrations.httpx_client.OAuth2Client):
+      raise ValueError('You are trying to logout from a non-OAuth2 session. This is not supported!')
+
+    data = {'refresh_token': self.oauth2_config.token.refresh_token,
+            'id': self.oauth2_config.client_id,
+            }
+    url_logout = f'{self.url_rest}/auth/logout'
+
+    res = self.session.post(url_logout, data=data)
+
+    return res.status_code == 200
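A short sketch of the item-fetching helpers just defined. `json_to_item` dispatches on the reported item type, so the returned objects are already the specific subclasses; item names below are hypothetical.

```python
from openhab.client import OpenHAB

client = OpenHAB('http://localhost:8080/rest')  # placeholder URL

# fetch_all_items() returns {name: Item-subclass instance}.
for name, item in client.fetch_all_items().items():
    print(name, type(item).__name__)

# get_item() goes through json_to_item(), so a Switch item
# comes back as a SwitchItem instance.
porch_light = client.get_item('PorchLight')     # hypothetical item name
print(porch_light)                              # e.g. <Switch - PorchLight : ON>
```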
+
+  def _oauth2_token_updater(self, token: typing.Dict[str, typing.Any],
+                            refresh_token: typing.Any = None,
+                            access_token: typing.Any = None) -> None:
+    if self.oauth2_config is None:
+      raise ValueError('OAuth2 configuration is not set; invalid action!')
+
+    self.oauth2_config.token = Oauth2Token(**token)
+
+    with self.oauth2_config.token_cache.open('w', encoding='utf-8') as fhdl:
+      fhdl.write(self.oauth2_config.token.model_dump_json())
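Given the token updater above, an OAuth2-backed session only needs the dictionary described in the constructor docstring; whenever authlib refreshes the token, `_oauth2_token_updater` rewrites the cache file. A sketch, with placeholder token values and paths:

```python
from openhab.client import OpenHAB

# Placeholder token, shaped like the structure openHAB returns.
oauth2_token = {'access_token': '...', 'expires_in': 3600,
                'refresh_token': '...', 'scope': 'admin',
                'token_type': 'bearer',
                'user': {'name': 'admin', 'roles': ['administrator']}}

client = OpenHAB('http://localhost:8080/rest',
                 oauth2_config={'client_id': 'http://127.0.0.1/auth',
                                'token_cache': '/home/user/.oauth2_token',
                                'token': oauth2_token})

# ... use the client, then invalidate the refresh token server-side:
client.logout()
```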
+ """ + paramdict: typing.Dict[ + str, typing.Union[str, typing.List[str], typing.Dict[str, typing.Union[str, typing.List[str]]]]] = {} + + if isinstance(_type, type): + if issubclass(_type, openhab.items.Item): + itemtypename = _type.TYPENAME + else: + raise ValueError( + f'_type parameter must be a valid subclass of type *Item* or a string name of such a class; given value is "{str(_type)}"') + else: + itemtypename = _type + + if quantity_type is None: + paramdict['type'] = itemtypename + else: + paramdict['type'] = f'{itemtypename}:{quantity_type}' + + paramdict['name'] = name + + if label is not None: + paramdict['label'] = label + + if category is not None: + paramdict['category'] = category + + if tags is not None: + paramdict['tags'] = tags + + if group_names is not None: + paramdict['groupNames'] = group_names + + if group_type is not None: + if isinstance(group_type, type): + if issubclass(group_type, openhab.items.Item): + paramdict['groupType'] = group_type.TYPENAME + else: + raise ValueError( + f'group_type parameter must be a valid subclass of type *Item* or a string name of such a class; given value is "{str(group_type)}"') + else: + paramdict['groupType'] = group_type + + if function_name is not None: + if function_name not in ( + 'EQUALITY', 'AND', 'OR', 'NAND', 'NOR', 'AVG', 'SUM', 'MAX', 'MIN', 'COUNT', 'LATEST', 'EARLIEST'): + raise ValueError(f'Invalid function name "{function_name}') + + if function_name in ('AND', 'OR', 'NAND', 'NOR') and (not function_params or len(function_params) != 2): + raise ValueError(f'Group function "{function_name}" requires two arguments') + + if function_name == 'COUNT' and (not function_params or len(function_params) != 1): + raise ValueError(f'Group function "{function_name}" requires one arguments') + + if function_params: + paramdict['function'] = {'name': function_name, 'params': function_params} + else: + paramdict['function'] = {'name': function_name} + + self.logger.debug('About to create item with PUT request:\n%s', str(paramdict)) + + self.req_put(f'/items/{name}', json_data=paramdict, headers={'Content-Type': 'application/json'}) + + def get_item_persistence(self, + name: str, + service_id: typing.Optional[str] = None, + start_time: typing.Optional[datetime.datetime] = None, + end_time: typing.Optional[datetime.datetime] = None, + page: int = 0, + page_length: int = 0, + boundary: bool = False, + ) -> typing.Iterator[typing.Dict[str, typing.Union[str, int]]]: + """Method for fetching persistence data for a given item. + + Args: + name: The item name persistence data should be fetched for. + service_id: ID of the persistence service. If not provided the default service will be used. + start_time: Start time of the data to return. Will default to 1 day before end_time. + end_time: End time of the data to return. Will default to current time. + page: Page number of data to return. Defaults to 0 if not provided. + page_length: The length of each page. Defaults to 0 which disabled paging. + boundary: Gets one value before and after the requested period. + + Returns: + Iterator over dict values containing time and state value, e.g. 
+ {"time": 1695588900122, + "state": "23" + } + """ + params: typing.Dict[str, typing.Any] = {'boundary': str(boundary).lower(), + 'page': page, + 'pagelength': page_length, + } + + if service_id is not None: + params['serviceId'] = service_id + + if start_time is not None: + params['starttime'] = start_time.isoformat() + + if end_time is not None: + params['endtime'] = end_time.isoformat() + + if start_time == end_time: + raise ValueError('start_time must differ from end_time') + + res = self.req_get(f'/persistence/items/{name}', params=params) + + yield from res['data'] + + while page_length > 0 and int(res['datapoints']) > 0: + params['page'] += 1 + res = self.req_get(f'/persistence/items/{name}', params=params) + yield from res['data'] diff --git a/myenv/lib/python3.12/site-packages/openhab/command_types.py b/myenv/lib/python3.12/site-packages/openhab/command_types.py new file mode 100644 index 0000000..6c63479 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/command_types.py @@ -0,0 +1,591 @@ +"""python library for accessing the openHAB REST API.""" + +# +# Georges Toth (c) 2016-present +# +# python-openhab is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# python-openhab is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with python-openhab. If not, see . +# + +# pylint: disable=bad-indentation + +import abc +import datetime +import re +import typing + +import dateutil.parser + +__author__ = 'Georges Toth ' +__license__ = 'AGPLv3+' + + +class CommandType(metaclass=abc.ABCMeta): + """Base command type class.""" + + TYPENAME = '' + SUPPORTED_TYPENAMES: typing.List[str] = [] + UNDEF = 'UNDEF' + NULL = 'NULL' + UNDEFINED_STATES = [UNDEF, NULL] + + @classmethod + def is_undefined(cls, value: typing.Any) -> bool: + """Return true if given value is an undefined value in openHAB (i.e. UNDEF/NULL).""" + return value in CommandType.UNDEFINED_STATES + + @classmethod + def get_type_for(cls, + typename: str, + parent_cls: typing.Optional[typing.Type['CommandType']] = None, + ) -> typing.Union[typing.Type['CommandType'], None]: + """Get a class type for a given typename.""" + if parent_cls is None: + parent_cls = CommandType + for a_type in parent_cls.__subclasses__(): + if typename in a_type.SUPPORTED_TYPENAMES: + return a_type + + # maybe a subclass of a subclass + result = a_type.get_type_for(typename, a_type) + if result is not None: + return result + + return None + + @classmethod + @abc.abstractmethod + def parse(cls, value: str) -> typing.Optional[typing.Any]: + """Parse a given value.""" + raise NotImplementedError + + @classmethod + @abc.abstractmethod + def validate(cls, value: typing.Any) -> None: + """Value validation method. As this is the base class which should not be used\ + directly, we throw a NotImplementedError exception. + + Args: + value (Object): The value to validate. The data_type of the value depends on the item + data_type and is checked accordingly. + Raises: + NotImplementedError: Raises NotImplementedError as the base class should never + be used directly. 
+ """ + raise NotImplementedError + + +class UndefType(CommandType): + """Undefined type.""" + TYPENAME = 'UnDef' + SUPPORTED_TYPENAMES = [TYPENAME] + + @classmethod + def parse(cls, value: str) -> typing.Optional[str]: + """Parse a given value.""" + if value in UndefType.UNDEFINED_STATES: + return None + return value + + @classmethod + def validate(cls, value: str) -> None: + """Value validation method.""" + + +class GroupType(CommandType): + """Group type.""" + TYPENAME = 'Group' + SUPPORTED_TYPENAMES = [TYPENAME] + + @classmethod + def parse(cls, value: str) -> typing.Optional[str]: + """Parse a given value.""" + if value in GroupType.UNDEFINED_STATES: + return None + return value + + @classmethod + def validate(cls, value: str) -> None: + """Value validation method.""" + + +class StringType(CommandType): + """StringType data_type class.""" + + TYPENAME = 'String' + SUPPORTED_TYPENAMES = [TYPENAME] + + @classmethod + def parse(cls, value: str) -> typing.Optional[str]: + """Parse a given value.""" + if value in StringType.UNDEFINED_STATES: + return None + if not isinstance(value, str): + raise ValueError + return value + + @classmethod + def validate(cls, value: str) -> None: + """Value validation method. + + Valid values are any of data_type string. + + Args: + value (str): The value to validate. + + Raises: + ValueError: Raises ValueError if an invalid value has been specified. + """ + StringType.parse(value) + + +class OnOffType(StringType): + """OnOffType data_type class.""" + TYPENAME = 'OnOff' + SUPPORTED_TYPENAMES = [TYPENAME] + ON = 'ON' + OFF = 'OFF' + POSSIBLE_VALUES = [ON, OFF] + + @classmethod + def parse(cls, value: str) -> typing.Optional[str]: + """Parse a given value.""" + if value in OnOffType.UNDEFINED_STATES: + return None + if value not in OnOffType.POSSIBLE_VALUES: + raise ValueError + return value + + @classmethod + def validate(cls, value: str) -> None: + """Value validation method. + + Valid values are ``ON`` and ``OFF``. + + Args: + value (str): The value to validate. + + Raises: + ValueError: Raises ValueError if an invalid value has been specified. + """ + super().validate(value) + OnOffType.parse(value) + + +class OpenCloseType(StringType): + """OpenCloseType data_type class.""" + TYPENAME = 'OpenClosed' + SUPPORTED_TYPENAMES = [TYPENAME] + OPEN = 'OPEN' + CLOSED = 'CLOSED' + POSSIBLE_VALUES = [OPEN, CLOSED] + + @classmethod + def parse(cls, value: str) -> typing.Optional[str]: + """Parse a given value.""" + if value in OpenCloseType.UNDEFINED_STATES: + return None + if value not in OpenCloseType.POSSIBLE_VALUES: + raise ValueError + return value + + @classmethod + def validate(cls, value: str) -> None: + """Value validation method. + + Valid values are ``OPEN`` and ``CLOSED``. + + Args: + value (str): The value to validate. + + Raises: + ValueError: Raises ValueError if an invalid value has been specified. 
+ """ + super().validate(value) + OpenCloseType.parse(value) + + +class ColorType(CommandType): + """ColorType data_type class.""" + TYPENAME = 'HSB' + SUPPORTED_TYPENAMES = [TYPENAME] + + @classmethod + def parse(cls, value: str) -> typing.Optional[typing.Tuple[float, float, float]]: + """Parse a given value.""" + if value in ColorType.UNDEFINED_STATES: + return None + + if not isinstance(value, str): + raise ValueError + + str_split = value.split(',') + if len(str_split) != 3: + raise ValueError + + hs, ss, bs = value.split(',', 3) + h = float(hs) + s = float(ss) + b = float(bs) + if not ((0 <= h <= 360) and (0 <= s <= 100) and (0 <= b <= 100)): + raise ValueError + return h, s, b + + @classmethod + def validate(cls, value: typing.Union[str, typing.Tuple[float, float, float]]) -> None: + """Value validation method. + + Valid values are in format H,S,B. + Value ranges: + H(ue): 0-360 + S(aturation): 0-100 + B(rightness): 0-100 + + Args: + value (str): The value to validate. + + Raises: + ValueError: Raises ValueError if an invalid value has been specified. + """ + if isinstance(value, str): + str_value = str(value) + elif isinstance(value, tuple) and len(value) == 3: + str_value = f'{value[0]},{value[1]},{value[2]}' + else: + raise ValueError + + ColorType.parse(str_value) + + +class DecimalType(CommandType): + """DecimalType data_type class.""" + TYPENAME = 'Decimal' + SUPPORTED_TYPENAMES = [TYPENAME, 'Quantity'] + + @classmethod + def parse(cls, value: str) -> typing.Union[None, typing.Tuple[typing.Union[int, float], str]]: + """Parse a given value.""" + if value in DecimalType.UNDEFINED_STATES: + return None + + m = re.match(r'(-?[0-9.]+(?:[eE]-?[0-9]+)?)\s?(.*)?$', value) + if m: + value_value = m.group(1) + value_unit_of_measure = m.group(2) + + try: + if '.' in value: + return_value: typing.Union[int, float] = float(value_value) + else: + return_value = int(value_value) + except ArithmeticError as exc: + raise ValueError(exc) from exc + + return return_value, value_unit_of_measure + + raise ValueError + + @classmethod + def validate(cls, value: typing.Union[int, float, typing.Tuple[typing.Union[int, float], str], str]) -> None: + """Value validation method. + + Valid values are any of data_type: + - ``int`` + - ``float`` + - a tuple of (``int`` or ``float``, ``str``) for numeric value, unit of measure + - a ``str`` that can be parsed to one of the above by ``DecimalType.parse`` + + Args: + value (int, float, tuple, str): The value to validate. + + Raises: + ValueError: Raises ValueError if an invalid value has been specified. + """ + if isinstance(value, str): + DecimalType.parse(value) + elif isinstance(value, tuple) and len(value) == 2: + DecimalType.parse(f'{value[0]} {value[1]}') + elif not isinstance(value, (int, float)): + raise ValueError + + +class PercentType(CommandType): + """PercentType data_type class.""" + TYPENAME = 'Percent' + SUPPORTED_TYPENAMES = [TYPENAME] + + @classmethod + def parse(cls, value: str) -> typing.Optional[float]: + """Parse a given value.""" + if value in PercentType.UNDEFINED_STATES: + return None + try: + f = float(value) + if not 0 <= f <= 100: + raise ValueError + return f + except Exception as e: + raise ValueError(e) from e + + @classmethod + def validate(cls, value: typing.Union[float, int]) -> None: + """Value validation method. + + Valid values are any of data_type ``float`` or ``int`` and must be greater of equal to 0 + and smaller or equal to 100. + + Args: + value (float): The value to validate. 
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    if not (isinstance(value, (float, int)) and 0 <= value <= 100):
+      raise ValueError
+
+
+class IncreaseDecreaseType(StringType):
+  """IncreaseDecreaseType data_type class."""
+  TYPENAME = 'IncreaseDecrease'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+
+  INCREASE = 'INCREASE'
+  DECREASE = 'DECREASE'
+
+  POSSIBLE_VALUES = [INCREASE, DECREASE]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in IncreaseDecreaseType.UNDEFINED_STATES:
+      return None
+    if value not in IncreaseDecreaseType.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    Valid values are ``INCREASE`` and ``DECREASE``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+    IncreaseDecreaseType.parse(value)
+
+
+class DateTimeType(CommandType):
+  """DateTimeType data_type class."""
+  TYPENAME = 'DateTime'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[datetime.datetime]:
+    """Parse a given value."""
+    if value in DateTimeType.UNDEFINED_STATES:
+      return None
+    return dateutil.parser.parse(value)
+
+  @classmethod
+  def validate(cls, value: datetime.datetime) -> None:
+    """Value validation method.
+
+    Valid values are any of data_type ``datetime.datetime``.
+
+    Args:
+      value (datetime.datetime): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    if not isinstance(value, datetime.datetime):
+      raise ValueError
+
+
+class UpDownType(StringType):
+  """UpDownType data_type class."""
+
+  TYPENAME = 'UpDown'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+  UP = 'UP'
+  DOWN = 'DOWN'
+  POSSIBLE_VALUES = [UP, DOWN]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in UpDownType.UNDEFINED_STATES:
+      return None
+    if value not in UpDownType.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    Valid values are ``UP`` and ``DOWN``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+
+    UpDownType.parse(value)
+
+
+class StopMoveType(StringType):
+  """StopMoveType data_type class."""
+
+  TYPENAME = 'StopMove'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+  STOP = 'STOP'
+  POSSIBLE_VALUES = [STOP]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in StopMoveType.UNDEFINED_STATES:
+      return None
+    if value not in StopMoveType.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    The only valid value is ``STOP``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+
+    StopMoveType.parse(value)
+
+
+class PlayPauseType(StringType):
+  """PlayPauseType data_type class."""
+
+  TYPENAME = 'PlayPause'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+  PLAY = 'PLAY'
+  PAUSE = 'PAUSE'
+  POSSIBLE_VALUES = [PLAY, PAUSE]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in PlayPauseType.UNDEFINED_STATES:
+      return None
+    if value not in PlayPauseType.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    Valid values are ``PLAY`` and ``PAUSE``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+
+    PlayPauseType.parse(value)
+
+
+class NextPrevious(StringType):
+  """NextPrevious data_type class."""
+
+  TYPENAME = 'NextPrevious'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+  NEXT = 'NEXT'
+  PREVIOUS = 'PREVIOUS'
+  POSSIBLE_VALUES = [NEXT, PREVIOUS]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in NextPrevious.UNDEFINED_STATES:
+      return None
+    if value not in NextPrevious.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    Valid values are ``NEXT`` and ``PREVIOUS``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+
+    NextPrevious.parse(value)
+
+
+class RewindFastforward(StringType):
+  """RewindFastforward data_type class."""
+
+  TYPENAME = 'RewindFastforward'
+  SUPPORTED_TYPENAMES = [TYPENAME]
+  REWIND = 'REWIND'
+  FASTFORWARD = 'FASTFORWARD'
+  POSSIBLE_VALUES = [REWIND, FASTFORWARD]
+
+  @classmethod
+  def parse(cls, value: str) -> typing.Optional[str]:
+    """Parse a given value."""
+    if value in RewindFastforward.UNDEFINED_STATES:
+      return None
+    if value not in RewindFastforward.POSSIBLE_VALUES:
+      raise ValueError
+    return value
+
+  @classmethod
+  def validate(cls, value: str) -> None:
+    """Value validation method.
+
+    Valid values are ``REWIND`` and ``FASTFORWARD``.
+
+    Args:
+      value (str): The value to validate.
+
+    Raises:
+      ValueError: Raises ValueError if an invalid value has been specified.
+    """
+    super().validate(value)
+
+    RewindFastforward.parse(value)
diff --git a/myenv/lib/python3.12/site-packages/openhab/config.py b/myenv/lib/python3.12/site-packages/openhab/config.py
new file mode 100644
index 0000000..57f56a2
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/openhab/config.py
@@ -0,0 +1,72 @@
+"""python library for accessing the openHAB REST API."""
+
+#
+# Georges Toth (c) 2016-present
+#
+# python-openhab is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# python-openhab is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with python-openhab. If not, see .
+# + +import pathlib +import time +import typing + +import pydantic + +'''Considering a oauth2 token config is expected to look like the following: + +``` +{"client_id": "http://127.0.0.1/auth", + "token_cache": "///.oauth2_token", + "token": { + "access_token": "adsafdasfasfsafasfsafasfasfasfsa....", + "expires_in": 3600, + "refresh_token": "312e21e21e32112", + "scope": "admin", + "token_type": "bearer", + "user": { + "name": "admin", + "roles": [ + "administrator" + ] + } + } + } +``` + +, the following classes model that structure for validation. +''' + + +class Oauth2User(pydantic.BaseModel): + """Nested user structure within an oauth2 token.""" + name: str + roles: typing.List[str] + + +class Oauth2Token(pydantic.BaseModel): + """Structure as returned by openHAB when generating a new oauth2 token.""" + access_token: str + expires_in: int + expires_at: float = time.time() - 10 + refresh_token: str + scope: typing.Union[str, typing.List[str]] = 'admin' + token_type: str + user: Oauth2User + + +class Oauth2Config(pydantic.BaseModel): + """Structure expected for a full oauth2 config.""" + client_id: str = 'http://127.0.0.1/auth' + token_cache: pathlib.Path + token: Oauth2Token diff --git a/myenv/lib/python3.12/site-packages/openhab/exceptions.py b/myenv/lib/python3.12/site-packages/openhab/exceptions.py new file mode 100644 index 0000000..a08c2d8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/exceptions.py @@ -0,0 +1,9 @@ +"""python-openhab exceptions.""" + + +class OpenHABException(Exception): + """Base of all python-openhab exceptions.""" + + +class InvalidReturnException(OpenHABException): + """The openHAB server returned an invalid or unparsable result.""" diff --git a/myenv/lib/python3.12/site-packages/openhab/items.py b/myenv/lib/python3.12/site-packages/openhab/items.py new file mode 100644 index 0000000..1aab5ad --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/items.py @@ -0,0 +1,672 @@ +"""python library for accessing the openHAB REST API.""" + +# +# Georges Toth (c) 2016-present +# +# python-openhab is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# python-openhab is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with python-openhab. If not, see . +# + + +import datetime +import logging +import re +import typing + +import dateutil.parser + +import openhab.command_types +import openhab.exceptions + +__author__ = 'Georges Toth ' +__license__ = 'AGPLv3+' + + +class Item: + """Base item class.""" + + types: typing.Sequence[typing.Type[openhab.command_types.CommandType]] = [] + state_types: typing.Sequence[typing.Type[openhab.command_types.CommandType]] = [] + command_event_types: typing.Sequence[typing.Type[openhab.command_types.CommandType]] = [] + state_event_types: typing.Sequence[typing.Type[openhab.command_types.CommandType]] = [] + state_changed_event_types: typing.Sequence[typing.Type[openhab.command_types.CommandType]] = [] + + TYPENAME = 'unknown' + + def __init__(self, openhab_conn: 'openhab.client.OpenHAB', json_data: dict) -> None: + """Constructor. 
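The pydantic models above can be exercised directly; a sketch validating a placeholder token structure. Note that `expires_at` defaults to a timestamp already slightly in the past, so a token loaded without it is due for a refresh on first use.

```python
from openhab.config import Oauth2Config

cfg = Oauth2Config(token_cache='/tmp/.oauth2_token',  # coerced to pathlib.Path
                   token={'access_token': 'placeholder',
                          'expires_in': 3600,
                          'refresh_token': 'placeholder',
                          'token_type': 'bearer',
                          'user': {'name': 'admin',
                                   'roles': ['administrator']}})

print(cfg.client_id)           # default: http://127.0.0.1/auth
print(cfg.token.model_dump())  # plain dict, as handed to authlib
```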
+ + Args: + openhab_conn (openhab.OpenHAB): openHAB object. + json_data (dic): A dict converted from the JSON data returned by the openHAB + server. + """ + self.openhab = openhab_conn + self.type_: typing.Optional[str] = None + self.quantityType: typing.Optional[str] = None + self.editable = None + self.label = '' + self.category = '' + self.tags = '' + self.groupNames = '' + self.group = False + self.name = '' + self._state = None # type: typing.Optional[typing.Any] + self._unitOfMeasure = '' + self._raw_state = None # type: typing.Optional[typing.Any] # raw state as returned by the server + self._members = {} # type: typing.Dict[str, typing.Any] # group members (key = item name), for none-group items it's empty + self.function_name: typing.Optional[str] = None + self.function_params: typing.Optional[typing.Sequence[str]] = None + + self.logger = logging.getLogger(__name__) + + self.init_from_json(json_data) + + def init_from_json(self, json_data: dict) -> None: + """Initialize this object from a json configuration as fetched from openHAB. + + Args: + json_data (dict): A dict converted from the JSON data returned by the openHAB + server. + """ + self.name = json_data['name'] + if json_data['type'] == 'Group': + self.group = True + if 'groupType' in json_data: + self.type_ = json_data['groupType'] + + if 'function' in json_data: + self.function_name = json_data['function']['name'] + + if 'params' in json_data['function']: + self.function_params = json_data['function']['params'] + + # init members + for i in json_data['members']: + self.members[i['name']] = self.openhab.json_to_item(i) + + else: + self.type_ = json_data.get('type', None) + + if self.type_ is None: + raise openhab.exceptions.InvalidReturnException('Item did not return a type attribute.') + + parts = self.type_.split(':') + if len(parts) == 2: + self.quantityType = parts[1] + + if 'editable' in json_data: + self.editable = json_data['editable'] + if 'label' in json_data: + self.label = json_data['label'] + if 'category' in json_data: + self.category = json_data['category'] + if 'tags' in json_data: + self.tags = json_data['tags'] + if 'groupNames' in json_data: + self.groupNames = json_data['groupNames'] + + self._raw_state = json_data['state'] + + if self.is_undefined(self._raw_state): + self._state = None + else: + self._state, self._unitOfMeasure = self._parse_rest(self._raw_state) + + @property + def state(self) -> typing.Any: + """The state property represents the current state of the item. + + The state is automatically refreshed from openHAB on reading it. + Updating the value via this property send an update to the event bus. + """ + json_data = self.openhab.get_item_raw(self.name) + self.init_from_json(json_data) + + return self._state + + @state.setter + def state(self, value: typing.Any) -> None: + self.update(value) + + @property + def unit_of_measure(self) -> str: + """Return the unit of measure. Returns an empty string if there is none defined.""" + return self._unitOfMeasure + + @property + def members(self) -> typing.Dict[str, typing.Any]: + """If item is a type of Group, it will return all member items for this group. + + For none group item empty dictionary will be returned. + + Returns: + dict: Returns a dict with item names as key and `Item` class instances as value. 
+ + """ + return self._members + + def _validate_value(self, value: typing.Union[str, typing.Type[openhab.command_types.CommandType]]) -> None: + """Private method for verifying the new value before modifying the state of the item.""" + if self.type_ == 'String': + if not isinstance(value, (str, bytes)): + raise ValueError + elif self.types: + validation = False + + for type_ in self.types: + try: + type_.validate(value) + except ValueError: + pass + else: + validation = True + + if not validation: + raise ValueError(f'Invalid value "{value}"') + else: + raise ValueError + + def _parse_rest(self, value: str) -> typing.Tuple[str, str]: + """Parse a REST result into a native object.""" + return value, '' + + def _rest_format(self, value: str) -> typing.Union[str, bytes]: + """Format a value before submitting to openHAB.""" + _value = value # type: typing.Union[str, bytes] + + # Only ascii encoding is supported by default. If non-ascii characters were provided, convert them to bytes. + try: + _ = value.encode('ascii') + except UnicodeError: + _value = value.encode('utf-8') + + return _value + + def is_undefined(self, value: str) -> bool: + """Check if value is undefined.""" + for aStateType in self.state_types: + if not aStateType.is_undefined(value): + return False + + return True + + def __str__(self) -> str: + """String representation.""" + state = self._state + if self._unitOfMeasure and not isinstance(self._state, tuple): + state = f'{self._state} {self._unitOfMeasure}' + return f'<{self.type_} - {self.name} : {state}>' + + def _update(self, value: typing.Any) -> None: + """Updates the state of an item, input validation is expected to be already done. + + Args: + value (object): The value to update the item with. The type of the value depends + on the item type and is checked accordingly. + """ + # noinspection PyTypeChecker + self.openhab.req_put(f'/items/{self.name}/state', data=value) + + def update(self, value: typing.Any) -> None: + """Updates the state of an item. + + Args: + value (object): The value to update the item with. The type of the value depends + on the item type and is checked accordingly. + """ + self._validate_value(value) + + v = self._rest_format(value) + + self._state = value + + self._update(v) + + def command(self, value: typing.Any) -> None: + """Sends the given value as command to the event bus. + + Args: + value (object): The value to send as command to the event bus. The type of the + value depends on the item type and is checked accordingly. 
+ """ + self._validate_value(value) + + v = self._rest_format(value) + + self._state = value + + self.openhab.req_post(f'/items/{self.name}', data=v) + + def update_state_null(self) -> None: + """Update the state of the item to *NULL*.""" + self._update('NULL') + + def update_state_undef(self) -> None: + """Update the state of the item to *UNDEF*.""" + self._update('UNDEF') + + def is_state_null(self) -> bool: + """If the item state is None, use this method for checking if the remote value is NULL.""" + if self.state is None: + # we need to query the current remote state as else this method will not work correctly if called after + # either update_state method + if self._raw_state is None: + # This should never happen + raise ValueError('Invalid internal (raw) state.') + + return self._raw_state == 'NULL' + + return False + + def is_state_undef(self) -> bool: + """If the item state is None, use this method for checking if the remote value is UNDEF.""" + if self.state is None: + # we need to query the current remote state as else this method will not work correctly if called after + # either update_state method + if self._raw_state is None: + # This should never happen + raise ValueError('Invalid internal (raw) state.') + + return self._raw_state == 'UNDEF' + + return False + + def persistence(self, + service_id: typing.Optional[str] = None, + start_time: typing.Optional[datetime.datetime] = None, + end_time: typing.Optional[datetime.datetime] = None, + page: int = 0, + page_length: int = 0, + boundary: bool = False, + ) -> typing.Iterator[typing.Dict[str, typing.Union[str, int]]]: + """Method for fetching persistence data for a given item. + + Args: + service_id: ID of the persistence service. If not provided the default service will be used. + start_time: Start time of the data to return. Will default to 1 day before end_time. + end_time: End time of the data to return. Will default to current time. + page: Page number of data to return. Defaults to 0 if not provided. + page_length: The length of each page. Defaults to 0 which disabled paging. + boundary: Gets one value before and after the requested period. + + Returns: + Iterator over dict values containing time and state value, e.g. 
+ {"time": 1695588900122, + "state": "23" + } + """ + yield from self.openhab.get_item_persistence(name=self.name, + service_id=service_id, + start_time=start_time, + end_time=end_time, + page=page, + page_length=page_length, + boundary=boundary, + ) + + +class GroupItem(Item): + """String item type.""" + + TYPENAME = 'Group' + types: typing.List[typing.Type[openhab.command_types.CommandType]] = [] + state_types: typing.List[typing.Type[openhab.command_types.CommandType]] = [] + + +class StringItem(Item): + """String item type.""" + + TYPENAME = 'String' + types = [openhab.command_types.StringType] + state_types = types + + +class DateTimeItem(Item): + """DateTime item type.""" + + TYPENAME = 'DateTime' + types = [openhab.command_types.DateTimeType] + state_types = types + + def __gt__(self, other: datetime.datetime) -> bool: + """Greater than comparison.""" + if self._state is None or not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return self._state > other + + def __ge__(self, other: datetime.datetime) -> bool: + """Greater or equal comparison.""" + if self._state is None or not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return self._state >= other + + def __lt__(self, other: object) -> bool: + """Less than comparison.""" + if not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return not self.__gt__(other) + + def __le__(self, other: object) -> bool: + """Less or equal comparison.""" + if self._state is None or not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return self._state <= other + + def __eq__(self, other: object) -> bool: + """Equality comparison.""" + if not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return self._state == other + + def __ne__(self, other: object) -> bool: + """Not equal comparison.""" + if not isinstance(other, datetime.datetime): + raise NotImplementedError('You can only compare two DateTimeItem objects.') + + return not self.__eq__(other) + + def _parse_rest(self, value: str) -> typing.Tuple[datetime.datetime, str]: # type: ignore[override] + """Parse a REST result into a native object. + + Args: + value (str): A string argument to be converted into a datetime.datetime object. + + Returns: + datetime.datetime: The datetime.datetime object as converted from the string + parameter. + """ + return dateutil.parser.parse(value), '' + + def _rest_format(self, value: datetime.datetime) -> str: # type: ignore[override] + """Format a value before submitting to openHAB. + + Args: + value (datetime.datetime): A datetime.datetime argument to be converted + into a string. + + Returns: + str: The string as converted from the datetime.datetime parameter. 
+ """ + # openHAB supports only up to milliseconds as of this writing + return value.isoformat(timespec='milliseconds') + + +class PlayerItem(Item): + """PlayerItem item type.""" + + TYPENAME = 'Player' + types = [openhab.command_types.PlayPauseType, openhab.command_types.NextPrevious, openhab.command_types.RewindFastforward] + state_types = [openhab.command_types.PlayPauseType, openhab.command_types.RewindFastforward] + + def play(self) -> None: + """Send the command PLAY.""" + self.command(openhab.command_types.PlayPauseType.PLAY) + + def pause(self) -> None: + """Send the command PAUSE.""" + self.command(openhab.command_types.PlayPauseType.PAUSE) + + def next(self) -> None: + """Send the command NEXT.""" + self.command(openhab.command_types.NextPrevious.NEXT) + + def previous(self) -> None: + """Send the command PREVIOUS.""" + self.command(openhab.command_types.NextPrevious.PREVIOUS) + + def fastforward(self) -> None: + """Send the command FASTFORWARD.""" + self.command(openhab.command_types.RewindFastforward.FASTFORWARD) + + def rewind(self) -> None: + """Send the command REWIND.""" + self.command(openhab.command_types.RewindFastforward.REWIND) + + +class SwitchItem(Item): + """SwitchItem item type.""" + + TYPENAME = 'Switch' + types = [openhab.command_types.OnOffType] + state_types = types + + def on(self) -> None: + """Set the state of the switch to ON.""" + self.command(openhab.command_types.OnOffType.ON) + + def off(self) -> None: + """Set the state of the switch to OFF.""" + self.command(openhab.command_types.OnOffType.OFF) + + def toggle(self) -> None: + """Toggle the state of the switch to OFF to ON and vice versa.""" + if self.state == openhab.command_types.OnOffType.ON: + self.off() + elif self.state == openhab.command_types.OnOffType.OFF: + self.on() + + +class NumberItem(Item): + """NumberItem item type.""" + + TYPENAME = 'Number' + types = [openhab.command_types.DecimalType] + state_types = types + + def _parse_rest(self, value: str) -> typing.Tuple[typing.Union[float, None], str]: # type: ignore[override] + """Parse a REST result into a native object. + + Args: + value (str): A string argument to be converted into a float object. + + Returns: + float: The float object as converted from the string parameter. + str: The unit Of Measure or empty string + """ + if value in ('UNDEF', 'NULL'): + return None, '' + # m = re.match(r'''^(-?[0-9.]+)''', value) + try: + m = re.match(r'(-?[0-9.]+(?:[eE]-?[0-9]+)?)\s?(.*)?$', value) + + if m: + value = m.group(1) + unit_of_measure = m.group(2) + + return float(value), unit_of_measure + + return float(value), '' + + except (ArithmeticError, ValueError) as exc: + self.logger.error('Error in parsing new value "%s" for "%s" - "%s"', value, self.name, exc) + + raise ValueError(f'{self.__class__}: unable to parse value "{value}"') + + def _rest_format(self, value: typing.Union[float, typing.Tuple[float, str], str]) -> typing.Union[str, bytes]: + """Format a value before submitting to openHAB. + + Args: + value: Either a float, a tuple of (float, str), or string; in the first two cases we have to cast it to a string. + + Returns: + str or bytes: A string or bytes as converted from the value parameter. 
+ """ + if isinstance(value, tuple) and len(value) == 2: + return super()._rest_format(f'{value[0]:G} {value[1]}') + if not isinstance(value, str): + return super()._rest_format(f'{value:G}') + return super()._rest_format(value) + + +class ContactItem(Item): + """Contact item type.""" + + TYPENAME = 'Contact' + types = [openhab.command_types.OpenCloseType] + state_types = types + + def command(self, *args: typing.Any, **kwargs: typing.Any) -> None: + """This overrides the `Item` command method. + + Note: Commands are not accepted for items of type contact. + """ + raise ValueError(f'This item ({self.__class__}) only supports updates, not commands!') + + def open(self) -> None: + """Set the state of the contact item to OPEN.""" + self.state = openhab.command_types.OpenCloseType.OPEN + + def closed(self) -> None: + """Set the state of the contact item to CLOSED.""" + self.state = openhab.command_types.OpenCloseType.CLOSED + + +class DimmerItem(Item): + """DimmerItem item type.""" + + TYPENAME = 'Dimmer' + types = [openhab.command_types.OnOffType, openhab.command_types.PercentType, openhab.command_types.IncreaseDecreaseType] + state_types = [openhab.command_types.PercentType] + + def _parse_rest(self, value: str) -> typing.Tuple[float, str]: # type: ignore[override] + """Parse a REST result into a native object. + + Args: + value (str): A string argument to be converted into a int object. + + Returns: + float: The int object as converted from the string parameter. + str: Possible UoM + """ + return float(value), '' + + def _rest_format(self, value: typing.Union[str, int]) -> str: + """Format a value before submitting to OpenHAB. + + Args: + value: Either a string or an integer; in the latter case we have to cast it to a string. + + Returns: + str: The string as possibly converted from the parameter. + """ + if not isinstance(value, str): + return str(value) + + return value + + def on(self) -> None: + """Set the state of the dimmer to ON.""" + self.command(openhab.command_types.OnOffType.ON) + + def off(self) -> None: + """Set the state of the dimmer to OFF.""" + self.command(openhab.command_types.OnOffType.OFF) + + def increase(self) -> None: + """Increase the state of the dimmer.""" + self.command(openhab.command_types.IncreaseDecreaseType.INCREASE) + + def decrease(self) -> None: + """Decrease the state of the dimmer.""" + self.command(openhab.command_types.IncreaseDecreaseType.DECREASE) + + +class ColorItem(DimmerItem): + """ColorItem item type.""" + + TYPENAME = 'Color' + types = [openhab.command_types.OnOffType, openhab.command_types.PercentType, openhab.command_types.IncreaseDecreaseType, openhab.command_types.ColorType] + state_types = [openhab.command_types.ColorType] + + def _parse_rest(self, value: str) -> typing.Tuple[typing.Optional[typing.Tuple[float, float, float]], str]: # type: ignore[override] + """Parse a REST result into a native object. + + Args: + value (str): A string argument to be converted into a str object. + + Returns: + HSB components + Optional UoM + """ + result = openhab.command_types.ColorType.parse(value) + return result, '' + + def _rest_format(self, value: typing.Union[typing.Tuple[int, int, float], str, int]) -> str: + """Format a value before submitting to openHAB. + + Args: + value: Either a string, an integer or a tuple of HSB components (int, int, float); in the latter two cases we have to cast it to a string. + + Returns: + str: The string as possibly converted from the parameter. 
+ """ + if isinstance(value, tuple): + if len(value) == 3: + return f'{value[0]},{value[1]},{value[2]}' + + if not isinstance(value, str): + return str(value) + + return value + + +class RollershutterItem(Item): + """RollershutterItem item type.""" + + TYPENAME = 'Rollershutter' + types = [openhab.command_types.UpDownType, openhab.command_types.PercentType, openhab.command_types.StopMoveType] + state_types = [openhab.command_types.PercentType] + + def _parse_rest(self, value: str) -> typing.Tuple[int, str]: # type: ignore[override] + """Parse a REST result into a native object. + + Args: + value (str): A string argument to be converted into a int object. + + Returns: + int: The int object as converted from the string parameter. + str: Possible UoM + """ + return int(float(value)), '' + + def _rest_format(self, value: typing.Union[str, int]) -> str: + """Format a value before submitting to openHAB. + + Args: + value: Either a string or an integer; in the latter case we have to cast it to a string. + + Returns: + str: The string as possibly converted from the parameter. + """ + if not isinstance(value, str): + return str(value) + + return value + + def up(self) -> None: + """Set the state of the dimmer to ON.""" + self.command(openhab.command_types.UpDownType.UP) + + def down(self) -> None: + """Set the state of the dimmer to OFF.""" + self.command(openhab.command_types.UpDownType.DOWN) + + def stop(self) -> None: + """Set the state of the dimmer to OFF.""" + self.command(openhab.command_types.StopMoveType.STOP) diff --git a/myenv/lib/python3.12/site-packages/openhab/oauth2_helper.py b/myenv/lib/python3.12/site-packages/openhab/oauth2_helper.py new file mode 100644 index 0000000..6a36e03 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/oauth2_helper.py @@ -0,0 +1,96 @@ +"""OAuth2 helper method for generating and fetching an OAuth2 token.""" + +import typing + +import bs4 +import httpx + + +def get_oauth2_token(base_url: str, + username: str, + password: str, + client_id: typing.Optional[str] = None, + redirect_url: typing.Optional[str] = None, + scope: typing.Optional[str] = None, + ) -> dict: + """Method for generating an OAuth2 token. 
+ + Args: + base_url: openHAB base URL + username: Admin account username + password: Admin account password + client_id: OAuth2 client ID; does not need to be specified + redirect_url: OAuth2 redirect URL; does not need to be specified + scope: Do not change unless you know what you are doing + + Returns: + *dict* with the generated OAuth2 token details + """ + if client_id is not None: + oauth2_client_id = client_id + else: + oauth2_client_id = 'http://127.0.0.1/auth' + + if redirect_url is not None: + oauth2_redirect_url = redirect_url + else: + oauth2_redirect_url = 'http://127.0.0.1/auth' + + if scope is not None: + oauth2_scope = scope + else: + oauth2_scope = 'admin' + + oauth2_auth_endpoint = f'{base_url}/rest/auth/token' + url_generate_token = f'{base_url}/auth?response_type=code&redirect_uri={oauth2_redirect_url}&client_id={oauth2_client_id}&scope={oauth2_scope}' + + res = httpx.get(url_generate_token, timeout=30) + res.raise_for_status() + + soup = bs4.BeautifulSoup(res.content, 'html.parser') + submit_form = soup.find('form') + + action = submit_form.attrs.get('action').lower() + url_submit_generate_token = f'{base_url}{action}' + + data = {} + + for input_tag in submit_form.find_all('input'): + input_name = input_tag.attrs.get('name') + + if input_name is None: + continue + + input_value = input_tag.attrs.get('value', '') + + data[input_name] = input_value + + data['username'] = username + data['password'] = password + + res = httpx.post(url_submit_generate_token, data=data, timeout=30) + if not 200 < res.status_code <= 302: + res.raise_for_status() + + if 'location' not in res.headers: + raise KeyError('Token generation failed!') + + oauth_redirect_location = res.headers['location'] + + if '?code=' not in oauth_redirect_location: + raise ValueError('Token generation failed!') + + oauth2_registration_code = oauth_redirect_location.split('?code=', 1)[1] + + data = {'grant_type': 'authorization_code', + 'code': oauth2_registration_code, + 'redirect_uri': oauth2_redirect_url, + 'client_id': oauth2_client_id, + 'refresh_token': None, + 'code_verifier': None, + } + + res = httpx.post(oauth2_auth_endpoint, data=data, timeout=30) + res.raise_for_status() + + return res.json() diff --git a/myenv/lib/python3.12/site-packages/openhab/py.typed b/myenv/lib/python3.12/site-packages/openhab/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/openhab/rules.py b/myenv/lib/python3.12/site-packages/openhab/rules.py new file mode 100644 index 0000000..d3811e2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/openhab/rules.py @@ -0,0 +1,46 @@ +"""python library for accessing the openHAB REST API.""" + +# +# Georges Toth (c) 2016-present +# +# python-openhab is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# python-openhab is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with python-openhab. If not, see . 
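Tying the helper above back to the client: a sketch of generating a token with admin credentials and starting an OAuth2 session from it, finishing with the `Rules` helper defined next. The URL and credentials are placeholders; note the helper takes the base URL *without* the `/rest` suffix.

```python
from openhab.client import OpenHAB
from openhab.oauth2_helper import get_oauth2_token

base_url = 'http://localhost:8080'  # placeholder, no /rest suffix here
token = get_oauth2_token(base_url, username='admin', password='secret')

client = OpenHAB(f'{base_url}/rest',
                 oauth2_config={'token_cache': '/home/user/.oauth2_token',
                                'token': token})

print(client.rules.get())  # list all rules via the Rules helper below
```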
+# + +# pylint: disable=bad-indentation + +import logging +import typing + +if typing.TYPE_CHECKING: + import openhab.client + +__author__ = 'Georges Toth ' +__license__ = 'AGPLv3+' + + +class Rules: + """Base rule class.""" + + def __init__(self, openhab_conn: 'openhab.client.OpenHAB') -> None: + """Constructor. + + Args: + openhab_conn (openhab.OpenHAB): openHAB object. + """ + self.openhab = openhab_conn + self.logger = logging.getLogger(__name__) + + def get(self) -> typing.List[typing.Dict[str, typing.Any]]: + """Get all rules.""" + return self.openhab.req_get('/rules') diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt new file mode 100644 index 0000000..0e63548 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/AUTHORS.txt @@ -0,0 +1,760 @@ +@Switch01 +A_Rog +Aakanksha Agrawal +Abhinav Sagar +ABHYUDAY PRATAP SINGH +abs51295 +AceGentile +Adam Chainz +Adam Tse +Adam Wentz +admin +Adrien Morison +ahayrapetyan +Ahilya +AinsworthK +Akash Srivastava +Alan Yee +Albert Tugushev +Albert-Guan +albertg +Alberto Sottile +Aleks Bunin +Ales Erjavec +Alethea Flowers +Alex Gaynor +Alex Grönholm +Alex Hedges +Alex Loosley +Alex Morega +Alex Stachowiak +Alexander Shtyrov +Alexandre Conrad +Alexey Popravka +Aleš Erjavec +Alli +Ami Fischman +Ananya Maiti +Anatoly Techtonik +Anders Kaseorg +Andre Aguiar +Andreas Lutro +Andrei Geacar +Andrew Gaul +Andrew Shymanel +Andrey Bienkowski +Andrey Bulgakov +Andrés Delfino +Andy Freeland +Andy Kluger +Ani Hayrapetyan +Aniruddha Basak +Anish Tambe +Anrs Hu +Anthony Sottile +Antoine Musso +Anton Ovchinnikov +Anton Patrushev +Antonio Alvarado Hernandez +Antony Lee +Antti Kaihola +Anubhav Patel +Anudit Nagar +Anuj Godase +AQNOUCH Mohammed +AraHaan +Arindam Choudhury +Armin Ronacher +Artem +Arun Babu Neelicattu +Ashley Manton +Ashwin Ramaswami +atse +Atsushi Odagiri +Avinash Karhana +Avner Cohen +Awit (Ah-Wit) Ghirmai +Baptiste Mispelon +Barney Gale +barneygale +Bartek Ogryczak +Bastian Venthur +Ben Bodenmiller +Ben Darnell +Ben Hoyt +Ben Mares +Ben Rosser +Bence Nagy +Benjamin Peterson +Benjamin VanEvery +Benoit Pierre +Berker Peksag +Bernard +Bernard Tyers +Bernardo B. Marques +Bernhard M. Wiedemann +Bertil Hatt +Bhavam Vidyarthi +Blazej Michalik +Bogdan Opanchuk +BorisZZZ +Brad Erickson +Bradley Ayers +Brandon L. 
Reiss +Brandt Bucher +Brett Randall +Brett Rosen +Brian Cristante +Brian Rosner +briantracy +BrownTruck +Bruno Oliveira +Bruno Renié +Bruno S +Bstrdsmkr +Buck Golemon +burrows +Bussonnier Matthias +bwoodsend +c22 +Caleb Martinez +Calvin Smith +Carl Meyer +Carlos Liam +Carol Willing +Carter Thayer +Cass +Chandrasekhar Atina +Chih-Hsuan Yen +Chris Brinker +Chris Hunt +Chris Jerdonek +Chris Kuehl +Chris McDonough +Chris Pawley +Chris Pryer +Chris Wolfe +Christian Clauss +Christian Heimes +Christian Oudard +Christoph Reiter +Christopher Hunt +Christopher Snyder +cjc7373 +Clark Boylan +Claudio Jolowicz +Clay McClure +Cody +Cody Soyland +Colin Watson +Collin Anderson +Connor Osborn +Cooper Lees +Cooper Ry Lees +Cory Benfield +Cory Wright +Craig Kerstiens +Cristian Sorinel +Cristina +Cristina Muñoz +Curtis Doty +cytolentino +Daan De Meyer +Dale +Damian +Damian Quiroga +Damian Shaw +Dan Black +Dan Savilonis +Dan Sully +Dane Hillard +daniel +Daniel Collins +Daniel Hahler +Daniel Holth +Daniel Jost +Daniel Katz +Daniel Shaulov +Daniele Esposti +Daniele Nicolodi +Daniele Procida +Daniil Konovalenko +Danny Hermes +Danny McClanahan +Darren Kavanagh +Dav Clark +Dave Abrahams +Dave Jones +David Aguilar +David Black +David Bordeynik +David Caro +David D Lowe +David Evans +David Hewitt +David Linke +David Poggi +David Pursehouse +David Runge +David Tucker +David Wales +Davidovich +ddelange +Deepak Sharma +Deepyaman Datta +Denise Yu +dependabot[bot] +derwolfe +Desetude +Devesh Kumar Singh +Diego Caraballo +Diego Ramirez +DiegoCaraballo +Dimitri Merejkowsky +Dimitri Papadopoulos +Dirk Stolle +Dmitry Gladkov +Dmitry Volodin +Domen Kožar +Dominic Davis-Foster +Donald Stufft +Dongweiming +doron zarhi +Dos Moonen +Douglas Thor +DrFeathers +Dustin Ingram +Dwayne Bailey +Ed Morley +Edgar Ramírez +Edgar Ramírez Mondragón +Ee Durbin +Efflam Lemaillet +efflamlemaillet +Eitan Adler +ekristina +elainechan +Eli Schwartz +Elisha Hollander +Ellen Marie Dash +Emil Burzo +Emil Styrke +Emmanuel Arias +Endoh Takanao +enoch +Erdinc Mutlu +Eric Cousineau +Eric Gillingham +Eric Hanchrow +Eric Hopper +Erik M. Bray +Erik Rose +Erwin Janssen +Eugene Vereshchagin +everdimension +Federico +Felipe Peter +Felix Yan +fiber-space +Filip Kokosiński +Filipe Laíns +Finn Womack +finnagin +Flavio Amurrio +Florian Briand +Florian Rathgeber +Francesco +Francesco Montesano +Frost Ming +Gabriel Curio +Gabriel de Perthuis +Garry Polley +gavin +gdanielson +Geoffrey Sneddon +George Song +Georgi Valkov +Georgy Pchelkin +ghost +Giftlin Rajaiah +gizmoguy1 +gkdoc +Godefroid Chapelle +Gopinath M +GOTO Hayato +gousaiyang +gpiks +Greg Roodt +Greg Ward +Guilherme Espada +Guillaume Seguin +gutsytechster +Guy Rozendorn +Guy Tuval +gzpan123 +Hanjun Kim +Hari Charan +Harsh Vardhan +harupy +Harutaka Kawamura +hauntsaninja +Henrich Hartzer +Henry Schreiner +Herbert Pfennig +Holly Stotelmyer +Honnix +Hsiaoming Yang +Hugo Lopes Tavares +Hugo van Kemenade +Hugues Bruant +Hynek Schlawack +Ian Bicking +Ian Cordasco +Ian Lee +Ian Stapleton Cordasco +Ian Wienand +Igor Kuzmitshov +Igor Sobreira +Ilan Schnell +Illia Volochii +Ilya Baryshev +Inada Naoki +Ionel Cristian Mărieș +Ionel Maries Cristian +Itamar Turner-Trauring +Ivan Pozdeev +J. Nick Koston +Jacob Kim +Jacob Walls +Jaime Sanz +jakirkham +Jakub Kuczys +Jakub Stasiak +Jakub Vysoky +Jakub Wilk +James Cleveland +James Curtin +James Firth +James Gerity +James Polley +Jan Pokorný +Jannis Leidel +Jarek Potiuk +jarondl +Jason Curtis +Jason R. 
Coombs +JasonMo +JasonMo1 +Jay Graves +Jean Abou Samra +Jean-Christophe Fillion-Robin +Jeff Barber +Jeff Dairiki +Jeff Widman +Jelmer Vernooij +jenix21 +Jeremy Stanley +Jeremy Zafran +Jesse Rittner +Jiashuo Li +Jim Fisher +Jim Garrison +Jiun Bae +Jivan Amara +Joe Bylund +Joe Michelini +John Paton +John T. Wodder II +John-Scott Atlakson +johnthagen +Jon Banafato +Jon Dufresne +Jon Parise +Jonas Nockert +Jonathan Herbert +Joonatan Partanen +Joost Molenaar +Jorge Niedbalski +Joseph Bylund +Joseph Long +Josh Bronson +Josh Hansen +Josh Schneier +Joshua +Juan Luis Cano Rodríguez +Juanjo Bazán +Judah Rand +Julian Berman +Julian Gethmann +Julien Demoor +Jussi Kukkonen +jwg4 +Jyrki Pulliainen +Kai Chen +Kai Mueller +Kamal Bin Mustafa +kasium +kaustav haldar +keanemind +Keith Maxwell +Kelsey Hightower +Kenneth Belitzky +Kenneth Reitz +Kevin Burke +Kevin Carter +Kevin Frommelt +Kevin R Patterson +Kexuan Sun +Kit Randel +Klaas van Schelven +KOLANICH +kpinc +Krishna Oza +Kumar McMillan +Kurt McKee +Kyle Persohn +lakshmanaram +Laszlo Kiss-Kollar +Laurent Bristiel +Laurent LAPORTE +Laurie O +Laurie Opperman +layday +Leon Sasson +Lev Givon +Lincoln de Sousa +Lipis +lorddavidiii +Loren Carvalho +Lucas Cimon +Ludovic Gasc +Lukas Geiger +Lukas Juhrich +Luke Macken +Luo Jiebin +luojiebin +luz.paz +László Kiss Kollár +M00nL1ght +Marc Abramowitz +Marc Tamlyn +Marcus Smith +Mariatta +Mark Kohler +Mark Williams +Markus Hametner +Martey Dodoo +Martin Fischer +Martin Häcker +Martin Pavlasek +Masaki +Masklinn +Matej Stuchlik +Mathew Jennings +Mathieu Bridon +Mathieu Kniewallner +Matt Bacchi +Matt Good +Matt Maker +Matt Robenolt +matthew +Matthew Einhorn +Matthew Feickert +Matthew Gilliard +Matthew Iversen +Matthew Treinish +Matthew Trumbell +Matthew Willson +Matthias Bussonnier +mattip +Maurits van Rees +Max W Chase +Maxim Kurnikov +Maxime Rouyrre +mayeut +mbaluna +mdebi +memoselyk +meowmeowcat +Michael +Michael Aquilina +Michael E. Karpeles +Michael Klich +Michael Mintz +Michael Williamson +michaelpacer +Michał Górny +Mickaël Schoentgen +Miguel Araujo Perez +Mihir Singh +Mike +Mike Hendricks +Min RK +MinRK +Miro Hrončok +Monica Baluna +montefra +Monty Taylor +Muha Ajjan‮ +Nadav Wexler +Nahuel Ambrosini +Nate Coraor +Nate Prewitt +Nathan Houghton +Nathaniel J. 
Smith +Nehal J Wani +Neil Botelho +Nguyễn Gia Phong +Nicholas Serra +Nick Coghlan +Nick Stenning +Nick Timkovich +Nicolas Bock +Nicole Harris +Nikhil Benesch +Nikhil Ladha +Nikita Chepanov +Nikolay Korolev +Nipunn Koorapati +Nitesh Sharma +Niyas Sait +Noah +Noah Gorny +Nowell Strite +NtaleGrey +nvdv +OBITORASU +Ofek Lev +ofrinevo +Oliver Freund +Oliver Jeeves +Oliver Mannion +Oliver Tonnhofer +Olivier Girardot +Olivier Grisel +Ollie Rutherfurd +OMOTO Kenji +Omry Yadan +onlinejudge95 +Oren Held +Oscar Benjamin +Oz N Tiram +Pachwenko +Patrick Dubroy +Patrick Jenkins +Patrick Lawson +patricktokeeffe +Patrik Kopkan +Paul Ganssle +Paul Kehrer +Paul Moore +Paul Nasrat +Paul Oswald +Paul van der Linden +Paulus Schoutsen +Pavel Safronov +Pavithra Eswaramoorthy +Pawel Jasinski +Paweł Szramowski +Pekka Klärck +Peter Gessler +Peter Lisák +Peter Waller +petr-tik +Phaneendra Chiruvella +Phil Elson +Phil Freo +Phil Pennock +Phil Whelan +Philip Jägenstedt +Philip Molloy +Philippe Ombredanne +Pi Delport +Pierre-Yves Rofes +Pieter Degroote +pip +Prabakaran Kumaresshan +Prabhjyotsing Surjit Singh Sodhi +Prabhu Marappan +Pradyun Gedam +Prashant Sharma +Pratik Mallya +pre-commit-ci[bot] +Preet Thakkar +Preston Holmes +Przemek Wrzos +Pulkit Goyal +q0w +Qiangning Hong +Qiming Xu +Quentin Lee +Quentin Pradet +R. David Murray +Rafael Caricio +Ralf Schmitt +Razzi Abuissa +rdb +Reece Dunham +Remi Rampin +Rene Dudfield +Riccardo Magliocchetti +Riccardo Schirone +Richard Jones +Richard Si +Ricky Ng-Adam +Rishi +RobberPhex +Robert Collins +Robert McGibbon +Robert Pollak +Robert T. McGibbon +robin elisha robinson +Roey Berman +Rohan Jain +Roman Bogorodskiy +Roman Donchenko +Romuald Brunet +ronaudinho +Ronny Pfannschmidt +Rory McCann +Ross Brattain +Roy Wellington Ⅳ +Ruairidh MacLeod +Russell Keith-Magee +Ryan Shepherd +Ryan Wooden +ryneeverett +Sachi King +Salvatore Rinchiera +sandeepkiran-js +Sander Van Balen +Savio Jomton +schlamar +Scott Kitterman +Sean +seanj +Sebastian Jordan +Sebastian Schaetz +Segev Finer +SeongSoo Cho +Sergey Vasilyev +Seth Michael Larson +Seth Woodworth +Shahar Epstein +Shantanu +shireenrao +Shivansh-007 +Shlomi Fish +Shovan Maity +Simeon Visser +Simon Cross +Simon Pichugin +sinoroc +sinscary +snook92 +socketubs +Sorin Sbarnea +Srinivas Nyayapati +Stavros Korokithakis +Stefan Scherfke +Stefano Rivera +Stephan Erb +Stephen Rosen +stepshal +Steve (Gadget) Barnes +Steve Barnes +Steve Dower +Steve Kowalik +Steven Myint +Steven Silvester +stonebig +studioj +Stéphane Bidoul +Stéphane Bidoul (ACSONE) +Stéphane Klein +Sumana Harihareswara +Surbhi Sharma +Sviatoslav Sydorenko +Swat009 +Sylvain +Takayuki SHIMIZUKAWA +Taneli Hukkinen +tbeswick +Thiago +Thijs Triemstra +Thomas Fenzl +Thomas Grainger +Thomas Guettler +Thomas Johansson +Thomas Kluyver +Thomas Smith +Thomas VINCENT +Tim D. Smith +Tim Gates +Tim Harder +Tim Heap +tim smith +tinruufu +Tobias Hermann +Tom Forbes +Tom Freudenheim +Tom V +Tomas Hrnciar +Tomas Orsava +Tomer Chachamu +Tommi Enenkel | AnB +Tomáš Hrnčiar +Tony Beswick +Tony Narlock +Tony Zhaocheng Tan +TonyBeswick +toonarmycaptain +Toshio Kuratomi +toxinu +Travis Swicegood +Tushar Sadhwani +Tzu-ping Chung +Valentin Haenel +Victor Stinner +victorvpaulo +Vikram - Google +Viktor Szépe +Ville Skyttä +Vinay Sajip +Vincent Philippon +Vinicyus Macedo +Vipul Kumar +Vitaly Babiy +Vladimir Fokow +Vladimir Rutsky +W. 
Trevor King +Wil Tan +Wilfred Hughes +William Edwards +William ML Leslie +William T Olson +William Woodruff +Wilson Mo +wim glenn +Winson Luk +Wolfgang Maier +Wu Zhenyu +XAMES3 +Xavier Fernandez +xoviat +xtreak +YAMAMOTO Takashi +Yen Chi Hsuan +Yeray Diaz Diaz +Yoval P +Yu Jian +Yuan Jing Vincent Yan +Yusuke Hayashi +Zearin +Zhiping Deng +ziebam +Zvezdan Petkovic +Łukasz Langa +Роман Донченко +Семён Марьясин +‮rekcäH nitraM‮ diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..8e7b65e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-present The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA new file mode 100644 index 0000000..e5b45bd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/METADATA @@ -0,0 +1,88 @@ +Metadata-Version: 2.1 +Name: pip +Version: 24.0 +Summary: The PyPA recommended tool for installing Python packages. 
+Author-email: The pip developers +License: MIT +Project-URL: Homepage, https://pip.pypa.io/ +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +License-File: AUTHORS.txt + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + :alt: PyPI + +.. image:: https://img.shields.io/pypi/pyversions/pip + :target: https://pypi.org/project/pip + :alt: PyPI - Python Version + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + :alt: Documentation + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + +* `Release notes`_ +* `Release process`_ + +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Development documentation`_ +* `Development IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. _Installation: https://pip.pypa.io/en/stable/installation/ +.. _Usage: https://pip.pypa.io/en/stable/ +.. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ +.. _GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _Issue tracking: https://github.com/pypa/pip/issues +.. _Discourse channel: https://discuss.python.org/c/packaging +.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa +.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev +.. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD new file mode 100644 index 0000000..cb9a9e6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/RECORD @@ -0,0 +1,1005 @@ +../../../bin/pip,sha256=n2A_zghynp7SEMiYok_38PyzsZ-bpD0Lgyn_zMo0pbA,242 +../../../bin/pip3,sha256=n2A_zghynp7SEMiYok_38PyzsZ-bpD0Lgyn_zMo0pbA,242 +../../../bin/pip3.12,sha256=n2A_zghynp7SEMiYok_38PyzsZ-bpD0Lgyn_zMo0pbA,242 +pip-24.0.dist-info/AUTHORS.txt,sha256=SwXm4nkwRkmtnO1ZY-dLy7EPeoQNXMNLby5CN3GlNhY,10388 +pip-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-24.0.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093 +pip-24.0.dist-info/METADATA,sha256=kNEfJ3_Vho2mee4lfJdlbd5RHIqsfQJSMUB-bOkIOeI,3581 +pip-24.0.dist-info/RECORD,, +pip-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip-24.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +pip-24.0.dist-info/entry_points.txt,sha256=Fa_c0b-xGFaYxagIruvpJD6qqXmNTA02vAVIkmMj-9o,125 +pip-24.0.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=oAk1nFpLmUVS5Ln7NxvNoGUn5Vkn6FGQjPaNDf8Q8pk,355 +pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854 +pip/__pip-runner__.py,sha256=EnrfKmKMzWAdqg_JicLCOP9Y95Ux7zHh4ObvqLtQcjo,1444 +pip/__pycache__/__init__.cpython-312.pyc,, +pip/__pycache__/__main__.cpython-312.pyc,, +pip/__pycache__/__pip-runner__.cpython-312.pyc,, +pip/_internal/__init__.py,sha256=iqZ5-YQsQV08tkUc7L806Reop6tguLFWf70ySF6be0Y,515 +pip/_internal/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/__pycache__/build_env.cpython-312.pyc,, +pip/_internal/__pycache__/cache.cpython-312.pyc,, +pip/_internal/__pycache__/configuration.cpython-312.pyc,, +pip/_internal/__pycache__/exceptions.cpython-312.pyc,, +pip/_internal/__pycache__/main.cpython-312.pyc,, +pip/_internal/__pycache__/pyproject.cpython-312.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-312.pyc,, +pip/_internal/build_env.py,sha256=1ESpqw0iupS_K7phZK5zshVE5Czy9BtGLFU4W6Enva8,10243 +pip/_internal/cache.py,sha256=uiYD-9F0Bv1C8ZyWE85lpzDmQf7hcUkgL99GmI8I41Q,10370 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-312.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-312.pyc,, +pip/_internal/cli/__pycache__/main.cpython-312.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-312.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-312.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-312.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc,, +pip/_internal/cli/autocompletion.py,sha256=_br_5NgSxSuvPjMF0MLHzS5s6BpSkQAQHKrLK89VauM,6690 +pip/_internal/cli/base_command.py,sha256=iuVWGa2oTq7gBReo0er3Z0tXJ2oqBIC6QjDHcnDhKXY,8733 +pip/_internal/cli/cmdoptions.py,sha256=V8ggG6AtHpHKkH_6tRU0mhJaZTeqtrFpu75ghvMXXJk,30063 
+pip/_internal/cli/command_context.py,sha256=RHgIPwtObh5KhMrd3YZTkl8zbVG-6Okml7YbFX4Ehg0,774 +pip/_internal/cli/main.py,sha256=Uzxt_YD1hIvB1AW5mxt6IVcht5G712AtMqdo51UMhmQ,2816 +pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338 +pip/_internal/cli/parser.py,sha256=KW6C3-7-4ErTNB0TfLTKwOdHcd-qefCeGnrOoE2r0RQ,10781 +pip/_internal/cli/progress_bars.py,sha256=So4mPoSjXkXiSHiTzzquH3VVyVD_njXlHJSExYPXAow,1968 +pip/_internal/cli/req_command.py,sha256=c7_XHABnXmD3_qlK9-r37KqdKBAcgmVKvQ2WcTrNLfc,18369 +pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118 +pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882 +pip/_internal/commands/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-312.pyc,, +pip/_internal/commands/__pycache__/check.cpython-312.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-312.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-312.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-312.pyc,, +pip/_internal/commands/__pycache__/download.cpython-312.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-312.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-312.pyc,, +pip/_internal/commands/__pycache__/help.cpython-312.pyc,, +pip/_internal/commands/__pycache__/index.cpython-312.pyc,, +pip/_internal/commands/__pycache__/inspect.cpython-312.pyc,, +pip/_internal/commands/__pycache__/install.cpython-312.pyc,, +pip/_internal/commands/__pycache__/list.cpython-312.pyc,, +pip/_internal/commands/__pycache__/search.cpython-312.pyc,, +pip/_internal/commands/__pycache__/show.cpython-312.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/commands/cache.py,sha256=xg76_ZFEBC6zoQ3gXLRfMZJft4z2a0RwH4GEFZC6nnU,7944 +pip/_internal/commands/check.py,sha256=Rb13Q28yoLh0j1gpx5SU0jlResNct21eQCRsnaO9xKA,1782 +pip/_internal/commands/completion.py,sha256=HT4lD0bgsflHq2IDgYfiEdp7IGGtE7s6MgI3xn0VQEw,4287 +pip/_internal/commands/configuration.py,sha256=n98enwp6y0b5G6fiRQjaZo43FlJKYve_daMhN-4BRNc,9766 +pip/_internal/commands/debug.py,sha256=63972uUCeMIGOdMMVeIUGrOjTOqTVWplFC82a-hcKyA,6777 +pip/_internal/commands/download.py,sha256=e4hw088zGo26WmJaMIRvCniLlLmoOjqolGyfHjsCkCQ,5335 +pip/_internal/commands/freeze.py,sha256=qrIHS_-c6JPrQ92hMhAv9kkl0bHgFpRLwYJDdbcYr1o,3243 +pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703 +pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132 +pip/_internal/commands/index.py,sha256=CNXQer_PeZKSJooURcCFCBEKGfwyNoUWYP_MWczAcOM,4775 +pip/_internal/commands/inspect.py,sha256=2wSPt9yfr3r6g-s2S5L6PvRtaHNVyb4TuodMStJ39cw,3188 +pip/_internal/commands/install.py,sha256=VxDd-BD3a27ApeE2OK34rfBXS6Zo2wtemK9-HCwPqxM,28782 +pip/_internal/commands/list.py,sha256=-QbpPuGDiGN1SdThsk2ml8beBnepliefbGhMAN8tkzU,12547 +pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697 +pip/_internal/commands/show.py,sha256=t5jia4zcYJRJZy4U_Von7zMl03hJmmcofj6oDNTnj7Y,6419 +pip/_internal/commands/uninstall.py,sha256=OIqO9tqadY8kM4HwhFf1Q62fUIp7v8KDrTRo8yWMz7Y,3886 +pip/_internal/commands/wheel.py,sha256=CSnX8Pmf1oPCnd7j7bn1_f58G9KHNiAblvVJ5zykN-A,6476 +pip/_internal/configuration.py,sha256=XkAiBS0hpzsM-LF0Qu5hvPWO_Bs67-oQKRYFBuMbESs,14006 
+pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858 +pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-312.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-312.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/distributions/base.py,sha256=oRSEvnv2ZjBnargamnv2fcJa1n6gUDKaW0g6CWSEpWs,1743 +pip/_internal/distributions/installed.py,sha256=QinHFbWAQ8oE0pbD8MFZWkwlnfU1QYTccA1vnhrlYOU,842 +pip/_internal/distributions/sdist.py,sha256=4K3V0VNMllHbBzCJibjwd_tylUKpmIdu2AQyhplvCQo,6709 +pip/_internal/distributions/wheel.py,sha256=-ma3sOtUQj0AxXCEb6_Fhmjl3nh4k3A0HC2taAb2N-4,1277 +pip/_internal/exceptions.py,sha256=TmF1iNFEneSWaemwlg6a5bpPuq2cMHK7d1-SvjsQHb0,23634 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/index/__pycache__/collector.cpython-312.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-312.pyc,, +pip/_internal/index/__pycache__/sources.cpython-312.pyc,, +pip/_internal/index/collector.py,sha256=sH0tL_cOoCk6pLLfCSGVjFM4rPEJtllF-VobvAvLSH4,16590 +pip/_internal/index/package_finder.py,sha256=S_nC8gzVIMY6ikWfKoSOzRtoesUqnfNhAPl_BwSOusA,37843 +pip/_internal/index/sources.py,sha256=dJegiR9f86kslaAHcv9-R5L_XBf5Rzm_FkyPteDuPxI,8688 +pip/_internal/locations/__init__.py,sha256=Dh8LJWG8LRlDK4JIj9sfRF96TREzE--N_AIlx7Tqoe4,15365 +pip/_internal/locations/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc,, +pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc,, +pip/_internal/locations/__pycache__/base.cpython-312.pyc,, +pip/_internal/locations/_distutils.py,sha256=H9ZHK_35rdDV1Qsmi4QeaBULjFT4Mbu6QuoVGkJ6QHI,6009 +pip/_internal/locations/_sysconfig.py,sha256=jyNVtUfMIf0mtyY-Xp1m9yQ8iwECozSVVFmjkN9a2yw,7680 +pip/_internal/locations/base.py,sha256=RQiPi1d4FVM2Bxk04dQhXZ2PqkeljEL2fZZ9SYqIQ78,2556 +pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340 +pip/_internal/metadata/__init__.py,sha256=9pU3W3s-6HtjFuYhWcLTYVmSaziklPv7k2x8p7X1GmA,4339 +pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/metadata/__pycache__/_json.cpython-312.pyc,, +pip/_internal/metadata/__pycache__/base.cpython-312.pyc,, +pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc,, +pip/_internal/metadata/_json.py,sha256=Rz5M5ciSNvITwaTQR6NfN8TgKgM5WfTws4D6CFknovE,2627 +pip/_internal/metadata/base.py,sha256=l3Wgku4xlgr8s4p6fS-3qQ4QKOpPbWLRwi5d9omEFG4,25907 +pip/_internal/metadata/importlib/__init__.py,sha256=jUUidoxnHcfITHHaAWG1G2i5fdBYklv_uJcjo2x7VYE,135 +pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc,, +pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc,, +pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc,, +pip/_internal/metadata/importlib/_compat.py,sha256=GAe_prIfCE4iUylrnr_2dJRlkkBVRUbOidEoID7LPoE,1882 +pip/_internal/metadata/importlib/_dists.py,sha256=UPl1wUujFqiwiltRJ1tMF42WRINO1sSpNNlYQ2mX0mk,8297 +pip/_internal/metadata/importlib/_envs.py,sha256=XTaFIYERP2JF0QUZuPx2ETiugXbPEcZ8q8ZKeht6Lpc,7456 +pip/_internal/metadata/pkg_resources.py,sha256=opjw4IBSqHvie6sXJ_cbT42meygoPEUfNURJuWZY7sk,10035 
+pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-312.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-312.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-312.pyc,, +pip/_internal/models/__pycache__/index.cpython-312.pyc,, +pip/_internal/models/__pycache__/installation_report.cpython-312.pyc,, +pip/_internal/models/__pycache__/link.cpython-312.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-312.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-312.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-312.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/models/candidate.py,sha256=hEPu8VdGE5qVASv6vLz-R-Rgh5-7LMbai1jgthMCd8M,931 +pip/_internal/models/direct_url.py,sha256=FwouYBKcqckh7B-k2H3HVgRhhFTukFwqiS3kfvtFLSk,6889 +pip/_internal/models/format_control.py,sha256=wtsQqSK9HaUiNxQEuB-C62eVimw6G4_VQFxV9-_KDBE,2486 +pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030 +pip/_internal/models/installation_report.py,sha256=zRVZoaz-2vsrezj_H3hLOhMZCK9c7TbzWgC-jOalD00,2818 +pip/_internal/models/link.py,sha256=XirOAGv1jgMu7vu87kuPbohGj7VHpwVrd2q3KUgVQNg,20777 +pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738 +pip/_internal/models/search_scope.py,sha256=ASVyyZxiJILw7bTIVVpJx8J293M3Hk5F33ilGn0e80c,4643 +pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907 +pip/_internal/models/target_python.py,sha256=34EkorrMuRvRp-bjqHKJ-bOO71m9xdjN2b8WWFEC2HU,4272 +pip/_internal/models/wheel.py,sha256=YqazoIZyma_Q1ejFa1C7NHKQRRWlvWkdK96VRKmDBeI,3600 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/network/__pycache__/auth.cpython-312.pyc,, +pip/_internal/network/__pycache__/cache.cpython-312.pyc,, +pip/_internal/network/__pycache__/download.cpython-312.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc,, +pip/_internal/network/__pycache__/session.cpython-312.pyc,, +pip/_internal/network/__pycache__/utils.cpython-312.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc,, +pip/_internal/network/auth.py,sha256=TC-OcW2KU4W6R1hU4qPgQXvVH54adACpZz6sWq-R9NA,20541 +pip/_internal/network/cache.py,sha256=48A971qCzKNFvkb57uGEk7-0xaqPS0HWj2711QNTxkU,3935 +pip/_internal/network/download.py,sha256=i0Tn55CD5D7XYEFY3TxiYaCf0OaaTQ6SScNgCsSeV14,6086 +pip/_internal/network/lazy_wheel.py,sha256=2PXVduYZPCPZkkQFe1J1GbfHJWeCU--FXonGyIfw9eU,7638 +pip/_internal/network/session.py,sha256=9tqEDD8JiVaFdplOEXJxNo9cjRfBZ6RIa0yQQ_qBNiM,18698 +pip/_internal/network/utils.py,sha256=6A5SrUJEEUHxbGtbscwU2NpCyz-3ztiDlGWHpRRhsJ8,4073 +pip/_internal/network/xmlrpc.py,sha256=sAxzOacJ-N1NXGPvap9jC3zuYWSnnv3GXtgR2-E2APA,1838 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/operations/__pycache__/check.cpython-312.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-312.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-312.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc,, +pip/_internal/operations/build/build_tracker.py,sha256=z-H5DOknZdBa3dh2Vq6VBMY5qLYIKmlj2p6CGZK5Lc8,4832 +pip/_internal/operations/build/metadata.py,sha256=9S0CUD8U3QqZeXp-Zyt8HxwU90lE4QrnYDgrqZDzBnc,1422 +pip/_internal/operations/build/metadata_editable.py,sha256=VLL7LvntKE8qxdhUdEJhcotFzUsOSI8NNS043xULKew,1474 +pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198 +pip/_internal/operations/build/wheel.py,sha256=sT12FBLAxDC6wyrDorh8kvcZ1jG5qInCRWzzP-UkJiQ,1075 +pip/_internal/operations/build/wheel_editable.py,sha256=yOtoH6zpAkoKYEUtr8FhzrYnkNHQaQBjWQ2HYae1MQg,1417 +pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064 +pip/_internal/operations/check.py,sha256=fsqA88iGaqftCr2tlP3sSU202CSkoODRtW0O-JU9M4Y,6806 +pip/_internal/operations/freeze.py,sha256=uqoeTAf6HOYVMR2UgAT8N85UZoGEVEoQdan_Ao6SOfk,9816 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=YeR0KadWXw_ZheC1NtAG1qVIEkOgRGHc23x-YtGW7NU,1282 +pip/_internal/operations/install/wheel.py,sha256=9hGb1c4bRnPIb2FG7CtUSPfPxqprmHQBtwIAlWPNTtE,27311 +pip/_internal/operations/prepare.py,sha256=57Oq87HfunX3Rbqp47FdaJr9cHbAKUm_3gv7WhBAqbE,28128 +pip/_internal/pyproject.py,sha256=4Xszp11xgr126yzG6BbJA0oaQ9WXuhb0jyUb-y_6lPQ,7152 +pip/_internal/req/__init__.py,sha256=TELFgZOof3lhMmaICVWL9U7PlhXo9OufokbMAJ6J2GI,2738 +pip/_internal/req/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-312.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-312.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-312.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-312.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc,, +pip/_internal/req/constructors.py,sha256=8hlY56imEthLORRwmloyKz3YOyXymIaKsNB6P9ewvNI,19018 +pip/_internal/req/req_file.py,sha256=M8ttOZL-PwAj7scPElhW3ZD2hiD9mm_6FJAGIbwAzEI,17790 +pip/_internal/req/req_install.py,sha256=wtOPxkyRSM8comTks8oL1Gp2oyGqbH7JwIDRci2QiPk,35460 +pip/_internal/req/req_set.py,sha256=iMYDUToSgkxFyrP_OrTtPSgw4dwjRyGRDpGooTqeA4Y,4704 +pip/_internal/req/req_uninstall.py,sha256=nmvTQaRCC0iu-5Tw0djlXJhSj6WmqHRvT3qkkEdC35E,24551 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-312.pyc,, +pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=Xk24jQ62GvLr4Mc7IjN_qiO88qp0BImzVmPIFz9QLOE,24025 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=jg5COmHLhmBIKOR-4spdJD3jyULYa1BdsqiBu2YJnJ4,5173 +pip/_internal/resolution/resolvelib/candidates.py,sha256=19Ki91Po-MSxBknGIfOGkaWkFdOznN0W_nKv7jL28L0,21052 +pip/_internal/resolution/resolvelib/factory.py,sha256=vqqk-hjchdhShwWVdeW2_A-5ZblLhE_nC_v3Mhz4Svc,32292 +pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705 +pip/_internal/resolution/resolvelib/provider.py,sha256=4t23ivjruqM6hKBX1KpGiTt-M4HGhRcZnGLV0c01K7U,9824 +pip/_internal/resolution/resolvelib/reporter.py,sha256=YFm9hQvz4DFCbjZeFTQ56hTz3Ac-mDBnHkeNRVvMHLY,3100 +pip/_internal/resolution/resolvelib/requirements.py,sha256=-kJONP0WjDfdTvBAs2vUXPgAnOyNIBEAXY4b72ogtPE,5696 +pip/_internal/resolution/resolvelib/resolver.py,sha256=nLJOsVMEVi2gQUVJoUFKMZAeu2f7GRMjGMvNSWyz0Bc,12592 +pip/_internal/self_outdated_check.py,sha256=saxQLB8UzIFtMScquytG10TOTsYVFJQ_mkW1NY-46wE,8378 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc,, +pip/_internal/utils/__pycache__/_log.cpython-312.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-312.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-312.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc,, +pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-312.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-312.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-312.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-312.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-312.pyc,, +pip/_internal/utils/__pycache__/models.cpython-312.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-312.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc,, 
+pip/_internal/utils/__pycache__/urls.cpython-312.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-312.pyc,, +pip/_internal/utils/_jaraco_text.py,sha256=yvDGelTVugRayPaOF2k4ab0Ky4d3uOkAfuOQjASjImY,3351 +pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015 +pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665 +pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884 +pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377 +pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242 +pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627 +pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206 +pip/_internal/utils/egg_link.py,sha256=0FePZoUYKv4RGQ2t6x7w5Z427wbA_Uo3WZnAkrgsuqo,2463 +pip/_internal/utils/encoding.py,sha256=qqsXDtiwMIjXMEiIVSaOjwH5YmirCaK-dIzb6-XJsL0,1169 +pip/_internal/utils/entrypoints.py,sha256=YlhLTRl2oHBAuqhc-zmL7USS67TPWVHImjeAQHreZTQ,3064 +pip/_internal/utils/filesystem.py,sha256=RhMIXUaNVMGjc3rhsDahWQ4MavvEQDdqXqgq-F6fpw8,5122 +pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716 +pip/_internal/utils/glibc.py,sha256=Mesxxgg3BLxheLZx-dSf30b6gKpOgdVXw6W--uHSszQ,3113 +pip/_internal/utils/hashes.py,sha256=MjOigC75z6qoRMkgHiHqot7eqxfwDZSrEflJMPm-bHE,5118 +pip/_internal/utils/logging.py,sha256=fdtuZJ-AKkqwDTANDvGcBEpssL8el7T1jnwk1CnZl3Y,11603 +pip/_internal/utils/misc.py,sha256=fNXwaeeikvnUt4CPMFIL4-IQbZDxxjj4jDpzCi4ZsOw,23623 +pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193 +pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108 +pip/_internal/utils/setuptools_build.py,sha256=ouXpud-jeS8xPyTPsXJ-m34NPvK5os45otAzdSV_IJE,4435 +pip/_internal/utils/subprocess.py,sha256=zzdimb75jVLE1GU4WlTZ055gczhD7n1y1xTcNc7vNZQ,9207 +pip/_internal/utils/temp_dir.py,sha256=DUAw22uFruQdK43i2L2K53C-CDjRCPeAsBKJpu-rHQ4,9312 +pip/_internal/utils/unpacking.py,sha256=SBb2iV1crb89MDRTEKY86R4A_UOWApTQn9VQVcMDOlE,8821 +pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759 +pip/_internal/utils/virtualenv.py,sha256=S6f7csYorRpiD6cvn3jISZYc3I8PJC43H5iMFpRAEDU,3456 +pip/_internal/utils/wheel.py,sha256=i4BwUNHattzN0ixy3HBAF04tZPRh2CcxaT6t86viwkE,4499 +pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596 +pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-312.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc,, +pip/_internal/vcs/bazaar.py,sha256=j0oin0fpGRHcCFCxEcpPCQoFEvA-DMLULKdGP8Nv76o,3519 +pip/_internal/vcs/git.py,sha256=CeKBGJnl6uskvvjkAUXrJVxbHJrpS_B_pyfFdjL3CRc,18121 +pip/_internal/vcs/mercurial.py,sha256=oULOhzJ2Uie-06d1omkL-_Gc6meGaUkyogvqG9ZCyPs,5249 +pip/_internal/vcs/subversion.py,sha256=vhZs8L-TNggXqM1bbhl-FpbxE3TrIB6Tgnx8fh3S2HE,11729 +pip/_internal/vcs/versioncontrol.py,sha256=3eIjtOMYvOY5qP6BMYIYDZ375CSuec6kSEB0bOo1cSs,22787 +pip/_internal/wheel_builder.py,sha256=qTTzQV8F6b1jNsFCda1TRQC8J7gK-m7iuRNgKo7Dj68,11801 
+pip/_vendor/__init__.py,sha256=U51NPwXdA-wXOiANIQncYjcMp6txgeOL5nHxksJeyas,4993 +pip/_vendor/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/__pycache__/six.cpython-312.pyc,, +pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc,, +pip/_vendor/cachecontrol/__init__.py,sha256=ctHagMhQXuvQDdm4TirZrwDOT5H8oBNAJqzdKI6sovk,676 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=iist2EpzJvDVIhMAxXq8iFnTBsiZAd6iplxfmNboNyk,1737 +pip/_vendor/cachecontrol/adapter.py,sha256=_CcWvUP9048qAZjsNqViaHbdcLs9mmFNixVfpO7oebE,6392 +pip/_vendor/cachecontrol/cache.py,sha256=OTQj72tUf8C1uEgczdl3Gc8vkldSzsTITKtDGKMx4z8,1952 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=dtrrroK5BnADR1GWjCZ19aZ0tFsMfvFBtLQQU1sp_ag,303 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=3z8AWKD-vfKeiJqIzLmJyIYtR2yd6Tsh3u1TyLRQoIQ,5352 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=9rmqwtYu_ljVkW6_oLqbC7EaX_a8YT_yLuna-eS0dgo,1386 +pip/_vendor/cachecontrol/controller.py,sha256=keCFA3ZaNVaWTwHd6F1zqWhb4vyvNx_UvZuo5iIYMfo,18384 +pip/_vendor/cachecontrol/filewrapper.py,sha256=STttGmIPBvZzt2b51dUOwoWX5crcMCpKZOisM3f5BNc,4292 +pip/_vendor/cachecontrol/heuristics.py,sha256=fdFbk9W8IeLrjteIz_fK4mj2HD_Y7COXF2Uc8TgjT1c,4828 +pip/_vendor/cachecontrol/serialize.py,sha256=0dHeMaDwysVAAnGVlhMOP4tDliohgNK0Jxk_zsOiWxw,7173 +pip/_vendor/cachecontrol/wrapper.py,sha256=hsGc7g8QGQTT-4f8tgz3AM5qwScg6FO0BSdLSRdEvpU,1417 +pip/_vendor/certifi/__init__.py,sha256=L_j-d0kYuA_MzA2_2hraF1ovf6KT6DTquRdV3paQwOk,94 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-312.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=eU0Dn_3yd8BH4m8sfVj4Glhl2KDrcCSg-sEWT-pNJ88,281617 +pip/_vendor/certifi/core.py,sha256=DNTl8b_B6C4vO3Vc9_q2uvwHpNnBQoy5onDC4McImxc,4531 +pip/_vendor/chardet/__init__.py,sha256=57R-HSxj0PWmILMN0GFmUNqEMfrEVSamXyjD-W6_fbs,4797 +pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc,, 
+pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-312.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274 +pip/_vendor/chardet/big5prober.py,sha256=lPMfwCX6v2AaPgvFh_cSWZcgLDbWiFCHLZ_p9RQ9uxE,1763 +pip/_vendor/chardet/chardistribution.py,sha256=13B8XUG4oXDuLdXvfbIWwLFeR-ZU21AqTS1zcdON8bU,10032 +pip/_vendor/chardet/charsetgroupprober.py,sha256=UKK3SaIZB2PCdKSIS0gnvMtLR9JJX62M-fZJu3OlWyg,3915 +pip/_vendor/chardet/charsetprober.py,sha256=L3t8_wIOov8em-vZWOcbkdsrwe43N6_gqNh5pH7WPd4,5420 +pip/_vendor/chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=zibMVg5RpKb-ME9_7EYG4ZM2Sf07NHcQzZ12U-rYJho,3242 +pip/_vendor/chardet/codingstatemachine.py,sha256=K7k69sw3jY5DmTXoSJQVsUtFIQKYPQVOSJJhBuGv_yE,3732 +pip/_vendor/chardet/codingstatemachinedict.py,sha256=0GY3Hi2qIZvDrOOJ3AtqppM1RsYxr_66ER4EHjuMiMc,542 +pip/_vendor/chardet/cp949prober.py,sha256=0jKRV7fECuWI16rNnks0ZECKA1iZYCIEaP8A1ZvjUSI,1860 +pip/_vendor/chardet/enums.py,sha256=TzECiZoCKNMqgwU76cPCeKWFBqaWvAdLMev5_bCkhY8,1683 +pip/_vendor/chardet/escprober.py,sha256=Kho48X65xE0scFylIdeJjM2bcbvRvv0h0WUbMWrJD3A,4006 +pip/_vendor/chardet/escsm.py,sha256=AqyXpA2FQFD7k-buBty_7itGEYkhmVa8X09NLRul3QM,12176 +pip/_vendor/chardet/eucjpprober.py,sha256=5KYaM9fsxkRYzw1b5k0fL-j_-ezIw-ij9r97a9MHxLY,3934 
+pip/_vendor/chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566 +pip/_vendor/chardet/euckrprober.py,sha256=hiFT6wM174GIwRvqDsIcuOc-dDsq2uPKMKbyV8-1Xnc,1753 +pip/_vendor/chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913 +pip/_vendor/chardet/euctwprober.py,sha256=NxbpNdBtU0VFI0bKfGfDkpP7S2_8_6FlO87dVH0ogws,1753 +pip/_vendor/chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735 +pip/_vendor/chardet/gb2312prober.py,sha256=KPEBueaSLSvBpFeINMu0D6TgHcR90e5PaQawifzF4o0,1759 +pip/_vendor/chardet/hebrewprober.py,sha256=96T_Lj_OmW-fK7JrSHojYjyG3fsGgbzkoTNleZ3kfYE,14537 +pip/_vendor/chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796 +pip/_vendor/chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498 +pip/_vendor/chardet/johabprober.py,sha256=O1Qw9nVzRnun7vZp4UZM7wvJSv9W941mEU9uDMnY3DU,1752 +pip/_vendor/chardet/jpcntx.py,sha256=uhHrYWkLxE_rF5OkHKInm0HUsrjgKHHVQvtt3UcvotA,27055 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=vmbvYFP8SZkSxoBvLkFqKiH1sjma5ihk3PTpdy71Rr4,104562 +pip/_vendor/chardet/langgreekmodel.py,sha256=JfB7bupjjJH2w3X_mYnQr9cJA_7EuITC2cRW13fUjeI,98484 +pip/_vendor/chardet/langhebrewmodel.py,sha256=3HXHaLQPNAGcXnJjkIJfozNZLTvTJmf4W5Awi6zRRKc,98196 +pip/_vendor/chardet/langhungarianmodel.py,sha256=WxbeQIxkv8YtApiNqxQcvj-tMycsoI4Xy-fwkDHpP_Y,101363 +pip/_vendor/chardet/langrussianmodel.py,sha256=s395bTZ87ESTrZCOdgXbEjZ9P1iGPwCl_8xSsac_DLY,128035 +pip/_vendor/chardet/langthaimodel.py,sha256=7bJlQitRpTnVGABmbSznHnJwOHDy3InkTvtFUx13WQI,102774 +pip/_vendor/chardet/langturkishmodel.py,sha256=XY0eGdTIy4eQ9Xg1LVPZacb-UBhHBR-cq0IpPVHowKc,95372 +pip/_vendor/chardet/latin1prober.py,sha256=p15EEmFbmQUwbKLC7lOJVGHEZwcG45ubEZYTGu01J5g,5380 +pip/_vendor/chardet/macromanprober.py,sha256=9anfzmY6TBfUPDyBDOdY07kqmTHpZ1tK0jL-p1JWcOY,6077 +pip/_vendor/chardet/mbcharsetprober.py,sha256=Wr04WNI4F3X_VxEverNG-H25g7u-MDDKlNt-JGj-_uU,3715 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=iRpaNBjV0DNwYPu_z6TiHgRpwYahiM7ztI_4kZ4Uz9A,2131 +pip/_vendor/chardet/mbcssm.py,sha256=hUtPvDYgWDaA2dWdgLsshbwRfm3Q5YRlRogdmeRUNQw,30391 +pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc,, +pip/_vendor/chardet/metadata/languages.py,sha256=FhvBIdZFxRQ-dTwkb_0madRKgVBCaUMQz9I5xqjE5iQ,13560 +pip/_vendor/chardet/resultdict.py,sha256=ez4FRvN5KaSosJeJ2WzUyKdDdg35HDy_SSLPXKCdt5M,402 +pip/_vendor/chardet/sbcharsetprober.py,sha256=-nd3F90i7GpXLjehLVHqVBE0KlWzGvQUPETLBNn4o6U,6400 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=gcgI0fOfgw_3YTClpbra_MNxwyEyJ3eUXraoLHYb59E,4137 +pip/_vendor/chardet/sjisprober.py,sha256=aqQufMzRw46ZpFlzmYaYeT2-nzmKb-hmcrApppJ862k,4007 +pip/_vendor/chardet/universaldetector.py,sha256=xYBrg4x0dd9WnT8qclfADVD9ondrUNkqPmvte1pa520,14848 +pip/_vendor/chardet/utf1632prober.py,sha256=pw1epGdMj1hDGiCu1AHqqzOEfjX8MVdiW7O1BlT8-eQ,8505 +pip/_vendor/chardet/utf8prober.py,sha256=8m08Ub5490H4jQ6LYXvFysGtgKoKsHUd2zH_i8_TnVw,2812 +pip/_vendor/chardet/version.py,sha256=lGtJcxGM44Qz4Cbk4rbbmrKxnNr1-97U25TameLehZw,244 +pip/_vendor/colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266 +pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc,, 
+pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc,, +pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 +pip/_vendor/colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128 +pip/_vendor/colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325 +pip/_vendor/colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75 +pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc,, +pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc,, +pip/_vendor/colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839 +pip/_vendor/colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678 +pip/_vendor/colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741 +pip/_vendor/colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866 +pip/_vendor/colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079 +pip/_vendor/colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709 +pip/_vendor/colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181 +pip/_vendor/colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134 +pip/_vendor/distlib/__init__.py,sha256=hJKF7FHoqbmGckncDuEINWo_OYkDNiHODtYXSMcvjcc,625 +pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-312.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc,, +pip/_vendor/distlib/compat.py,sha256=Un-uIBvy02w-D267OG4VEhuddqWgKj9nNkxVltAb75w,41487 +pip/_vendor/distlib/database.py,sha256=0V9Qvs0Vrxa2F_-hLWitIyVyRifJ0pCxyOI-kEOBwsA,51965 +pip/_vendor/distlib/index.py,sha256=lTbw268rRhj8dw1sib3VZ_0EhSGgoJO3FKJzSFMOaeA,20797 +pip/_vendor/distlib/locators.py,sha256=o1r_M86_bRLafSpetmyfX8KRtFu-_Q58abvQrnOSnbA,51767 +pip/_vendor/distlib/manifest.py,sha256=3qfmAmVwxRqU1o23AlfXrQGZzh6g_GGzTAP_Hb9C5zQ,14168 +pip/_vendor/distlib/markers.py,sha256=n3DfOh1yvZ_8EW7atMyoYeZFXjYla0Nz0itQlojCd0A,5268 +pip/_vendor/distlib/metadata.py,sha256=pB9WZ9mBfmQxc9OVIldLS5CjOoQRvKAvUwwQyKwKQtQ,39693 +pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +pip/_vendor/distlib/scripts.py,sha256=nQFXN6G7nOWNDUyxirUep-3WOlJhB7McvCs9zOnkGTI,18315 +pip/_vendor/distlib/util.py,sha256=XSznxEi_i3T20UJuaVc0qXHz5ksGUCW1khYlBprN_QE,67530 
+pip/_vendor/distlib/version.py,sha256=9pXkduchve_aN7JG6iL9VTYV_kqNSGoc2Dwl8JuySnQ,23747 +pip/_vendor/distlib/wheel.py,sha256=FVQCve8u-L0QYk5-YTZc7s4WmNQdvjRWTK08KXzZVX4,43958 +pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 +pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 +pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc,, +pip/_vendor/distro/__pycache__/distro.cpython-312.pyc,, +pip/_vendor/distro/distro.py,sha256=UZO1LjIhtFCMdlbiz39gj3raV-Amf3SBwzGzfApiMHw,49330 +pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc,, +pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 +pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +pip/_vendor/idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950 +pip/_vendor/idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375 +pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +pip/_vendor/idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21 +pip/_vendor/idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539 +pip/_vendor/msgpack/__init__.py,sha256=hyGhlnmcJkxryJBKC3X5FnEph375kQoL_mG8LZUuXgY,1132 +pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc,, +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=C5MK8JhVYGYFWPvxsORsqZAnvOXefYQ57m1Ym0luW5M,6079 +pip/_vendor/msgpack/fallback.py,sha256=tvNBHyxxFbuVlC8GZShETClJxjLiDMOja4XwwyvNm2g,34544 +pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 +pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 +pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-312.pyc,, +pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 +pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 +pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 
+pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487 +pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676 +pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 +pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 +pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 +pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 +pip/_vendor/pkg_resources/__init__.py,sha256=hTAeJCNYb7dJseIDVsYK3mPQep_gphj4tQh-bspX8bg,109364 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/platformdirs/__init__.py,sha256=SkhEYVyC_HUHC6KX7n4M_6coyRMtEB38QMyOYIAX6Yk,20155 +pip/_vendor/platformdirs/__main__.py,sha256=fVvSiTzr2-RM6IsjWjj4fkaOtDOgDhUWv6sA99do4CQ,1476 +pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc,, +pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc,, +pip/_vendor/platformdirs/android.py,sha256=y_EEMKwYl2-bzYBDovksSn8m76on0Lda8eyJksVQE9U,7211 +pip/_vendor/platformdirs/api.py,sha256=jWtX06jAJytYrkJDOqEls97mCkyHRSZkoqUlbMK5Qew,7132 +pip/_vendor/platformdirs/macos.py,sha256=LueVOoVgGWDBwQb8OFwXkVKfVn33CM1Lkwf1-A86tRQ,3678 +pip/_vendor/platformdirs/unix.py,sha256=22JhR8ZY0aLxSVCFnKrc6f1iz6Gv42K24Daj7aTjfSg,8809 +pip/_vendor/platformdirs/version.py,sha256=mavZTQIJIXfdewEaSTn7EWrNfPZWeRofb-74xqW5f2M,160 +pip/_vendor/platformdirs/windows.py,sha256=4TtbPGoWG2PRgI11uquDa7eRk8TcxvnUNuuMGZItnXc,9573 +pip/_vendor/pygments/__init__.py,sha256=6AuDljQtvf89DTNUyWM7k3oUlP_lq70NU-INKKteOBY,2983 +pip/_vendor/pygments/__main__.py,sha256=es8EKMvXj5yToIfQ-pf3Dv5TnIeeM6sME0LW-n4ecHo,353 +pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/console.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/style.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/token.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc,, +pip/_vendor/pygments/__pycache__/util.cpython-312.pyc,, +pip/_vendor/pygments/cmdline.py,sha256=byxYJp9gnjVeyhRlZ3UTMgo_LhkXh1afvN8wJBtAcc8,23685 +pip/_vendor/pygments/console.py,sha256=2wZ5W-U6TudJD1_NLUwjclMpbomFM91lNv11_60sfGY,1697 +pip/_vendor/pygments/filter.py,sha256=j5aLM9a9wSx6eH1oy473oSkJ02hGWNptBlVo4s1g_30,1938 +pip/_vendor/pygments/filters/__init__.py,sha256=h_koYkUFo-FFUxjs564JHUAz7O3yJpVwI6fKN3MYzG0,40386 +pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc,, 
+pip/_vendor/pygments/formatter.py,sha256=J9OL9hXLJKZk7moUgKwpjW9HNf4WlJFg_o_-Z_S_tTY,4178 +pip/_vendor/pygments/formatters/__init__.py,sha256=_xgAcdFKr0QNYwh_i98AU9hvfP3X2wAkhElFcRRF3Uo,5424 +pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc,, +pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc,, +pip/_vendor/pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176 +pip/_vendor/pygments/formatters/bbcode.py,sha256=r1b7wzWTJouADDLh-Z11iRi4iQxD0JKJ1qHl6mOYxsA,3314 +pip/_vendor/pygments/formatters/groff.py,sha256=xy8Zf3tXOo6MWrXh7yPGWx3lVEkg_DhY4CxmsDb0IVo,5094 +pip/_vendor/pygments/formatters/html.py,sha256=PIzAyilNqaTzSSP2slDG2VDLE3qNioWy2rgtSSoviuI,35610 +pip/_vendor/pygments/formatters/img.py,sha256=XKXmg2_XONrR4mtq2jfEU8XCsoln3VSGTw-UYiEokys,21938 +pip/_vendor/pygments/formatters/irc.py,sha256=Ep-m8jd3voFO6Fv57cUGFmz6JVA67IEgyiBOwv0N4a0,4981 +pip/_vendor/pygments/formatters/latex.py,sha256=FGzJ-YqSTE8z_voWPdzvLY5Tq8jE_ygjGjM6dXZJ8-k,19351 +pip/_vendor/pygments/formatters/other.py,sha256=gPxkk5BdAzWTCgbEHg1lpLi-1F6ZPh5A_aotgLXHnzg,5073 +pip/_vendor/pygments/formatters/pangomarkup.py,sha256=6LKnQc8yh49f802bF0sPvbzck4QivMYqqoXAPaYP8uU,2212 +pip/_vendor/pygments/formatters/rtf.py,sha256=aA0v_psW6KZI3N18TKDifxeL6mcF8EDXcPXDWI4vhVQ,5014 +pip/_vendor/pygments/formatters/svg.py,sha256=dQONWypbzfvzGCDtdp3M_NJawScJvM2DiHbx1k-ww7g,7335 +pip/_vendor/pygments/formatters/terminal.py,sha256=FG-rpjRpFmNpiGB4NzIucvxq6sQIXB3HOTo2meTKtrU,4674 +pip/_vendor/pygments/formatters/terminal256.py,sha256=13SJ3D5pFdqZ9zROE6HbWnBDwHvOGE8GlsmqGhprRp4,11753 +pip/_vendor/pygments/lexer.py,sha256=2BpqLlT2ExvOOi7vnjK5nB4Fp-m52ldiPaXMox5uwug,34618 +pip/_vendor/pygments/lexers/__init__.py,sha256=j5KEi5O_VQ5GS59H49l-10gzUOkWKxlwGeVMlGO2MMk,12130 +pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc,, +pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc,, +pip/_vendor/pygments/lexers/_mapping.py,sha256=Hts4r_ZQ8icftGM7gkBPeED5lyVSv4affFgXYE6Ap04,72281 +pip/_vendor/pygments/lexers/python.py,sha256=c7jnmKFU9DLxTJW0UbwXt6Z9FJqbBlVsWA1Qr9xSA_w,53424 +pip/_vendor/pygments/modeline.py,sha256=eF2vO4LpOGoPvIKKkbPfnyut8hT4UiebZPpb-BYGQdI,986 +pip/_vendor/pygments/plugin.py,sha256=j1Fh310RbV2DQ9nvkmkqvlj38gdyuYKllLnGxbc8sJM,2591 +pip/_vendor/pygments/regexopt.py,sha256=jg1ALogcYGU96TQS9isBl6dCrvw5y5--BP_K-uFk_8s,3072 +pip/_vendor/pygments/scanner.py,sha256=b_nu5_f3HCgSdp5S_aNRBQ1MSCm4ZjDwec2OmTRickw,3092 +pip/_vendor/pygments/sphinxext.py,sha256=wBFYm180qea9JKt__UzhRlNRNhczPDFDaqGD21sbuso,6882 
+pip/_vendor/pygments/style.py,sha256=C4qyoJrUTkq-OV3iO-8Vz3UtWYpJwSTdh5_vlGCGdNQ,6257 +pip/_vendor/pygments/styles/__init__.py,sha256=he7HjQx7sC0d2kfTVLjUs0J15mtToJM6M1brwIm9--Q,3700 +pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pygments/token.py,sha256=seNsmcch9OEHXYirh8Ool7w8xDhfNTbLj5rHAC-gc_o,6184 +pip/_vendor/pygments/unistring.py,sha256=FaUfG14NBJEKLQoY9qj6JYeXrpYcLmKulghdxOGFaOc,63223 +pip/_vendor/pygments/util.py,sha256=AEVY0qonyyEMgv4Do2dINrrqUAwUk2XYSqHM650uzek,10230 +pip/_vendor/pyparsing/__init__.py,sha256=9m1JbE2JTLdBG0Mb6B0lEaZj181Wx5cuPXZpsbHEYgE,9116 +pip/_vendor/pyparsing/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/actions.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/common.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/core.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/exceptions.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/helpers.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/results.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/testing.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/unicode.cpython-312.pyc,, +pip/_vendor/pyparsing/__pycache__/util.cpython-312.pyc,, +pip/_vendor/pyparsing/actions.py,sha256=05uaIPOznJPQ7VgRdmGCmG4sDnUPtwgv5qOYIqbL2UY,6567 +pip/_vendor/pyparsing/common.py,sha256=p-3c83E5-DjlkF35G0O9-kjQRpoejP-2_z0hxZ-eol4,13387 +pip/_vendor/pyparsing/core.py,sha256=yvuRlLpXSF8mgk-QhiW3OVLqD9T0rsj9tbibhRH4Yaw,224445 +pip/_vendor/pyparsing/diagram/__init__.py,sha256=nxmDOoYF9NXuLaGYy01tKFjkNReWJlrGFuJNWEiTo84,24215 +pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pyparsing/exceptions.py,sha256=6Jc6W1eDZBzyFu1J0YrcdNFVBC-RINujZmveSnB8Rxw,9523 +pip/_vendor/pyparsing/helpers.py,sha256=BZJHCA8SS0pYio30KGQTc9w2qMOaK4YpZ7hcvHbnTgk,38646 +pip/_vendor/pyparsing/results.py,sha256=9dyqQ-w3MjfmxWbFt8KEPU6IfXeyRdoWp2Og802rUQY,26692 +pip/_vendor/pyparsing/testing.py,sha256=eJncg0p83zm1FTPvM9auNT6oavIvXaibmRFDf1qmwkY,13488 +pip/_vendor/pyparsing/unicode.py,sha256=fAPdsJiARFbkPAih6NkYry0dpj4jPqelGVMlE4wWFW8,10646 +pip/_vendor/pyparsing/util.py,sha256=vTMzTdwSDyV8d_dSgquUTdWgBFoA_W30nfxEJDsshRQ,8670 +pip/_vendor/pyproject_hooks/__init__.py,sha256=kCehmy0UaBa9oVMD7ZIZrnswfnP3LXZ5lvnNJAL5JBM,491 +pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc,, +pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc,, +pip/_vendor/pyproject_hooks/_compat.py,sha256=by6evrYnqkisiM-MQcvOKs5bgDMzlOSgZqRHNqf04zE,138 +pip/_vendor/pyproject_hooks/_impl.py,sha256=61GJxzQip0IInhuO69ZI5GbNQ82XEDUB_1Gg5_KtUoc,11920 +pip/_vendor/pyproject_hooks/_in_process/__init__.py,sha256=9gQATptbFkelkIy0OfWFEACzqxXJMQDWCH9rBOAZVwQ,546 +pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc,, +pip/_vendor/pyproject_hooks/_in_process/_in_process.py,sha256=m2b34c917IW5o-Q_6TYIHlsK9lSUlNiyrITTUH_zwew,10927 +pip/_vendor/requests/__init__.py,sha256=owujob4dk45Siy4EYtbCKR6wcFph7E04a_v_OuAacBA,5169 +pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-312.pyc,, 
+pip/_vendor/requests/__pycache__/certs.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-312.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-312.pyc,, +pip/_vendor/requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435 +pip/_vendor/requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495 +pip/_vendor/requests/adapters.py,sha256=idj6cZcId3L5xNNeJ7ieOLtw3awJk5A64xUfetHwq3M,19697 +pip/_vendor/requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449 +pip/_vendor/requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 +pip/_vendor/requests/certs.py,sha256=PVPooB0jP5hkZEULSCwC074532UFbR2Ptgu0I5zwmCs,575 +pip/_vendor/requests/compat.py,sha256=IhK9quyX0RRuWTNcg6d2JGSAOUbM6mym2p_2XjLTwf4,1286 +pip/_vendor/requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 +pip/_vendor/requests/exceptions.py,sha256=FA-_kVwBZ2jhXauRctN_ewHVK25b-fj0Azyz1THQ0Kk,3823 +pip/_vendor/requests/help.py,sha256=FnAAklv8MGm_qb2UilDQgS6l0cUttiCFKUjx0zn2XNA,3879 +pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 +pip/_vendor/requests/models.py,sha256=dDZ-iThotky-Noq9yy97cUEJhr3wnY6mv-xR_ePg_lk,35288 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373 +pip/_vendor/requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 +pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 +pip/_vendor/requests/utils.py,sha256=kOPn0qYD6xRTzaxbqTdYiSInBZHl6379AJsyIgzYGLY,33460 +pip/_vendor/resolvelib/__init__.py,sha256=h509TdEcpb5-44JonaU3ex2TM15GVBLjM9CNCPwnTTs,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156 +pip/_vendor/resolvelib/providers.py,sha256=fuuvVrCetu5gsxPB43ERyjfO8aReS3rFQHpDgiItbs4,5871 +pip/_vendor/resolvelib/reporters.py,sha256=TSbRmWzTc26w0ggsV1bxVpeWDB8QNIre6twYl7GIZBE,1601 +pip/_vendor/resolvelib/resolvers.py,sha256=G8rsLZSq64g5VmIq-lB7UcIJ1gjAxIQJmTF4REZleQ0,20511 +pip/_vendor/resolvelib/structs.py,sha256=0_1_XO8z_CLhegP3Vpf9VJ3zJcfLm0NOHRM-i0Ykz3o,4963 +pip/_vendor/rich/__init__.py,sha256=dRxjIL-SbFVY0q3IjSMrfgBTHrm1LZDgLOygVBwiYZc,6090 +pip/_vendor/rich/__main__.py,sha256=TT8sb9PTnsnKhhrGuHkLN0jdN0dtKhtPkEr9CidDbPM,8478 
+pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/abc.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/align.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/bar.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/box.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/cells.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/color.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/columns.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/console.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/containers.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/control.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/errors.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/json.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/layout.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/live.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/logging.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/markup.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/measure.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/padding.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/pager.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/palette.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/panel.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/progress.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/region.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/repr.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/rule.cpython-312.pyc,, 
+pip/_vendor/rich/__pycache__/scope.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/screen.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/segment.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/status.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/style.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/styled.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/table.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/text.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/theme.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/themes.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc,, +pip/_vendor/rich/__pycache__/tree.cpython-312.pyc,, +pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096 +pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235 +pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064 +pip/_vendor/rich/_export_format.py,sha256=qxgV3nKnXQu1hfbnRVswPYy-AwIg1X0LSC47cK5s8jk,2100 +pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265 +pip/_vendor/rich/_fileno.py,sha256=HWZxP5C2ajMbHryvAQZseflVfQoGzsKOHzKGsLD8ynQ,799 +pip/_vendor/rich/_inspect.py,sha256=oZJGw31e64dwXSCmrDnvZbwVb1ZKhWfU8wI3VWohjJk,9695 +pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225 +pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236 +pip/_vendor/rich/_null_file.py,sha256=tGSXk_v-IZmbj1GAzHit8A3kYIQMiCpVsCFfsC-_KJ4,1387 +pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063 +pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423 +pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472 +pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919 +pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351 +pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417 +pip/_vendor/rich/_win32_console.py,sha256=P0vxI2fcndym1UU1S37XAzQzQnkyY7YqAKmxm24_gug,22820 +pip/_vendor/rich/_windows.py,sha256=dvNl9TmfPzNVxiKk5WDFihErZ5796g2UC9-KGGyfXmk,1926 +pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783 +pip/_vendor/rich/_wrap.py,sha256=xfV_9t0Sg6rzimmrDru8fCVmUlalYAcHLDfrJZnbbwQ,1840 +pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890 +pip/_vendor/rich/align.py,sha256=Ji-Yokfkhnfe_xMmr4ISjZB07TJXggBCOYoYa-HDAr8,10368 +pip/_vendor/rich/ansi.py,sha256=iD6532QYqnBm6hADulKjrV8l8kFJ-9fEVooHJHH3hMg,6906 +pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264 +pip/_vendor/rich/box.py,sha256=FJ6nI3jD7h2XNFU138bJUt2HYmWOlRbltoCEuIAZhew,9842 +pip/_vendor/rich/cells.py,sha256=627ztJs9zOL-38HJ7kXBerR-gT8KBfYC8UzEwMJDYYo,4509 +pip/_vendor/rich/color.py,sha256=9Gh958U3f75WVdLTeC0U9nkGTn2n0wnojKpJ6jQEkIE,18224 +pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054 +pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131 +pip/_vendor/rich/console.py,sha256=pDvkbLkvtZIMIwQx_jkZ-seyNl4zGBLviXoWXte9fwg,99218 +pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288 
+pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497 +pip/_vendor/rich/control.py,sha256=DSkHTUQLorfSERAKE_oTAEUFefZnZp4bQb4q8rHbKws,6630 +pip/_vendor/rich/default_styles.py,sha256=-Fe318kMVI_IwciK5POpThcO0-9DYJ67TZAN6DlmlmM,8082 +pip/_vendor/rich/diagnose.py,sha256=an6uouwhKPAlvQhYpNNpGq9EJysfMIOvvCbO3oSoR24,972 +pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501 +pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642 +pip/_vendor/rich/file_proxy.py,sha256=Tl9THMDZ-Pk5Wm8sI1gGg_U5DhusmxD-FZ0fUbcU0W0,1683 +pip/_vendor/rich/filesize.py,sha256=9fTLAPCAwHmBXdRv7KZU194jSgNrRb6Wx7RIoBgqeKY,2508 +pip/_vendor/rich/highlighter.py,sha256=p3C1g4QYzezFKdR7NF9EhPbzQDvdPUhGRgSyGGEmPko,9584 +pip/_vendor/rich/json.py,sha256=EYp9ucj-nDjYDkHCV6Mk1ve8nUOpuFLaW76X50Mis2M,5032 +pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252 +pip/_vendor/rich/layout.py,sha256=RFYL6HdCFsHf9WRpcvi3w-fpj-8O5dMZ8W96VdKNdbI,14007 +pip/_vendor/rich/live.py,sha256=vZzYvu7fqwlv3Gthl2xiw1Dc_O80VlGcCV0DOHwCyDM,14273 +pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667 +pip/_vendor/rich/logging.py,sha256=uB-cB-3Q4bmXDLLpbOWkmFviw-Fde39zyMV6tKJ2WHQ,11903 +pip/_vendor/rich/markup.py,sha256=xzF4uAafiEeEYDJYt_vUnJOGoTU8RrH-PH7WcWYXjCg,8198 +pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305 +pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970 +pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828 +pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396 +pip/_vendor/rich/panel.py,sha256=wGMe40J8KCGgQoM0LyjRErmGIkv2bsYA71RCXThD0xE,10574 +pip/_vendor/rich/pretty.py,sha256=eLEYN9xVaMNuA6EJVYm4li7HdOHxCqmVKvnOqJpyFt0,35852 +pip/_vendor/rich/progress.py,sha256=n4KF9vky8_5iYeXcyZPEvzyLplWlDvFLkM5JI0Bs08A,59706 +pip/_vendor/rich/progress_bar.py,sha256=cEoBfkc3lLwqba4XKsUpy4vSQKDh2QQ5J2J94-ACFoo,8165 +pip/_vendor/rich/prompt.py,sha256=x0mW-pIPodJM4ry6grgmmLrl8VZp99kqcmdnBe70YYA,11303 +pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391 +pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166 +pip/_vendor/rich/repr.py,sha256=9Z8otOmM-tyxnyTodvXlectP60lwahjGiDTrbrxPSTg,4431 +pip/_vendor/rich/rule.py,sha256=0fNaS_aERa3UMRc3T5WMpN_sumtDxfaor2y3of1ftBk,4602 +pip/_vendor/rich/scope.py,sha256=TMUU8qo17thyqQCPqjDLYpg_UU1k5qVd-WwiJvnJVas,2843 +pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591 +pip/_vendor/rich/segment.py,sha256=XLnJEFvcV3bjaVzMNUJiem3n8lvvI9TJ5PTu-IG2uTg,24247 +pip/_vendor/rich/spinner.py,sha256=15koCmF0DQeD8-k28Lpt6X_zJQUlzEhgo_6A6uy47lc,4339 +pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425 +pip/_vendor/rich/style.py,sha256=3hiocH_4N8vwRm3-8yFWzM7tSwjjEven69XqWasSQwM,27073 +pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258 +pip/_vendor/rich/syntax.py,sha256=jgDiVCK6cpR0NmBOpZmIu-Ud4eaW7fHvjJZkDbjpcSA,35173 +pip/_vendor/rich/table.py,sha256=-WzesL-VJKsaiDU3uyczpJMHy6VCaSewBYJwx8RudI8,39684 +pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370 +pip/_vendor/rich/text.py,sha256=_8JBlSau0c2z8ENOZMi1hJ7M1ZGY408E4-hXjHyyg1A,45525 +pip/_vendor/rich/theme.py,sha256=belFJogzA0W0HysQabKaHOc3RWH2ko3fQAJhoN-AFdo,3777 
+pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102 +pip/_vendor/rich/traceback.py,sha256=yCLVrCtyoFNENd9mkm2xeG3KmqkTwH9xpFOO7p2Bq0A,29604 +pip/_vendor/rich/tree.py,sha256=BMbUYNjS9uodNPfvtY_odmU09GA5QzcMbQ5cJZhllQI,9169 +pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 +pip/_vendor/tenacity/__init__.py,sha256=3kvAL6KClq8GFo2KFhmOzskRKSDQI-ubrlfZ8AQEEI0,20493 +pip/_vendor/tenacity/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/_asyncio.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/_utils.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/after.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/before.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/before_sleep.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/nap.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/retry.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/stop.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-312.pyc,, +pip/_vendor/tenacity/__pycache__/wait.cpython-312.pyc,, +pip/_vendor/tenacity/_asyncio.py,sha256=Qi6wgQsGa9MQibYRy3OXqcDQswIZZ00dLOoSUGN-6o8,3551 +pip/_vendor/tenacity/_utils.py,sha256=ubs6a7sxj3JDNRKWCyCU2j5r1CB7rgyONgZzYZq6D_4,2179 +pip/_vendor/tenacity/after.py,sha256=S5NCISScPeIrKwIeXRwdJl3kV9Q4nqZfnNPDx6Hf__g,1682 +pip/_vendor/tenacity/before.py,sha256=dIZE9gmBTffisfwNkK0F1xFwGPV41u5GK70UY4Pi5Kc,1562 +pip/_vendor/tenacity/before_sleep.py,sha256=YmpgN9Y7HGlH97U24vvq_YWb5deaK4_DbiD8ZuFmy-E,2372 +pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383 +pip/_vendor/tenacity/retry.py,sha256=jrzD_mxA5mSTUEdiYB7SHpxltjhPSYZSnSRATb-ggRc,8746 +pip/_vendor/tenacity/stop.py,sha256=YMJs7ZgZfND65PRLqlGB_agpfGXlemx_5Hm4PKnBqpQ,3086 +pip/_vendor/tenacity/tornadoweb.py,sha256=po29_F1Mt8qZpsFjX7EVwAT0ydC_NbVia9gVi7R_wXA,2142 +pip/_vendor/tenacity/wait.py,sha256=3FcBJoCDgym12_dN6xfK8C1gROY0Hn4NSI2u8xv50uE,8024 +pip/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396 +pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc,, +pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc,, +pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc,, +pip/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633 +pip/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943 +pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +pip/_vendor/truststore/__init__.py,sha256=qzTLSH8PvAkY1fr6QQ2vV-KwE_M83wdXugtpJaP_AbM,403 +pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc,, +pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc,, +pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc,, +pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc,, +pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc,, +pip/_vendor/truststore/_api.py,sha256=xjuEu_rlH4hcdJTROImEyOEqdw-F8t5vO2H2BToY0Ro,9893 +pip/_vendor/truststore/_macos.py,sha256=BjvAKoAjXhdIPuxpY124HJIFswDb0pq8DjynzJOVwqc,17694 +pip/_vendor/truststore/_openssl.py,sha256=LLUZ7ZGaio-i5dpKKjKCSeSufmn6T8pi9lDcFnvSyq0,2324 +pip/_vendor/truststore/_ssl_constants.py,sha256=NUD4fVKdSD02ri7-db0tnO0VqLP9aHuzmStcW7tAl08,1130 +pip/_vendor/truststore/_windows.py,sha256=1x_EhROeJ9QK1sMAjfnZC7awYI8UnBJYL-TjACUYI4A,17468 +pip/_vendor/typing_extensions.py,sha256=EWpcpyQnVmc48E9fSyPGs-vXgHcAk9tQABQIxmMsCGk,111130 
+pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333 +pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 +pip/_vendor/urllib3/_version.py,sha256=azoM7M7BUADl2kBhMVR6PPf2GhBDI90me1fcnzTwdcw,64 +pip/_vendor/urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300 +pip/_vendor/urllib3/connectionpool.py,sha256=ItVDasDnPRPP9R8bNxY7tPBlC724nJ9nlxVgXG_SLbI,39990 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +pip/_vendor/urllib3/contrib/appengine.py,sha256=VR68eAVE137lxTgjBDwCna5UiBZTOKa01Aj_-5BaCz4,11036 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=hDJh4MhyY_p-oKlFcYcQaVQRDv6GMmBGuW9yjxyeejM,17081 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=yhZdmVjY6PI6EeFbp7qYOp6-vp1Rkv2NMuOGaEj7pmc,34448 +pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +pip/_vendor/urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343 +pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665 +pip/_vendor/urllib3/poolmanager.py,sha256=0i8cJgrqupza67IBPZ_u9jXvnSxr5UBlVEiUqdkPtYI,19752 +pip/_vendor/urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691 +pip/_vendor/urllib3/response.py,sha256=fmDJAFkG71uFTn-sVSTh2Iw0WmcXQYqkbRjihvwBjU8,30641 +pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 +pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997 +pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +pip/_vendor/urllib3/util/retry.py,sha256=Z6WEf518eTOXP5jr5QSQ9gqJI0DVYt3Xs3EKnYaTmus,22013 +pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177 +pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758 +pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 +pip/_vendor/urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168 +pip/_vendor/urllib3/util/url.py,sha256=lCAE7M5myA8EDdW0sJuyyZhVB9K_j38ljWhHAnFaWoE,14296 +pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403 +pip/_vendor/vendor.txt,sha256=4NKk7fQhVsZw0U-0zmm9Q2LgGyaPXacFbnJAaS0Q6EY,493 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/__pycache__/__init__.cpython-312.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-312.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-312.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-312.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-312.pyc,, +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 
+pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 +pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 +pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286 diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/REQUESTED b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt new file mode 100644 index 0000000..26fa361 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main +pip3.12 = pip._internal.cli.main:main diff --git a/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip-24.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.12/site-packages/pip/__init__.py b/myenv/lib/python3.12/site-packages/pip/__init__.py new file mode 100644 index 0000000..be0e3ed --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/__init__.py @@ -0,0 +1,13 @@ +from typing import List, Optional + +__version__ = "24.0" + + +def main(args: Optional[List[str]] = None) -> int: + """This is an internal API only meant for use by pip's own console scripts. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/myenv/lib/python3.12/site-packages/pip/__main__.py b/myenv/lib/python3.12/site-packages/pip/__main__.py
new file mode 100644
index 0000000..5991326
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/__main__.py
@@ -0,0 +1,24 @@
+import os
+import sys
+
+# Remove '' and current working directory from the first entry
+# of sys.path, if present, to avoid using the current directory
+# in pip commands check, freeze, install, list and show,
+# when invoked as python -m pip
+if sys.path[0] in ("", os.getcwd()):
+    sys.path.pop(0)
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows the usage python pip-*.whl/pip install pip-*.whl
+if __package__ == "":
+    # __file__ is pip-*.whl/pip/__main__.py
+    # first dirname call strips off '/__main__.py', second strips off '/pip'
+    # Resulting path is the name of the wheel itself
+    # Add that to sys.path so we can import pip
+    path = os.path.dirname(os.path.dirname(__file__))
+    sys.path.insert(0, path)
+
+if __name__ == "__main__":
+    from pip._internal.cli.main import main as _main
+
+    sys.exit(_main())
diff --git a/myenv/lib/python3.12/site-packages/pip/__pip-runner__.py b/myenv/lib/python3.12/site-packages/pip/__pip-runner__.py
new file mode 100644
index 0000000..49a148a
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/__pip-runner__.py
@@ -0,0 +1,50 @@
+"""Execute exactly this copy of pip, within a different environment.
+
+This file is named as it is, to ensure that this module can't be imported via
+an import statement.
+"""
+
+# /!\ This version compatibility check section must be Python 2 compatible. /!\
+
+import sys
+
+# Copied from setup.py
+PYTHON_REQUIRES = (3, 7)
+
+
+def version_str(version):  # type: ignore
+    return ".".join(str(v) for v in version)
+
+
+if sys.version_info[:2] < PYTHON_REQUIRES:
+    raise SystemExit(
+        "This version of pip does not support python {} (requires >={}).".format(
+            version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
+        )
+    )
+
+# From here on, we can use Python 3 features, but the syntax must remain
+# Python 2 compatible.
+ +import runpy # noqa: E402 +from importlib.machinery import PathFinder # noqa: E402 +from os.path import dirname # noqa: E402 + +PIP_SOURCES_ROOT = dirname(dirname(__file__)) + + +class PipImportRedirectingFinder: + @classmethod + def find_spec(self, fullname, path=None, target=None): # type: ignore + if fullname != "pip": + return None + + spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) + assert spec, (PIP_SOURCES_ROOT, fullname) + return spec + + +sys.meta_path.insert(0, PipImportRedirectingFinder()) + +assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" +runpy.run_module("pip", run_name="__main__", alter_sys=True) diff --git a/myenv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4b7925c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..dcb2fd7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/__pycache__/__main__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc new file mode 100644 index 0000000..1d3705a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/__pycache__/__pip-runner__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/__init__.py new file mode 100644 index 0000000..96c6b88 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/__init__.py @@ -0,0 +1,18 @@ +from typing import List, Optional + +from pip._internal.utils import _log + +# init_logging() must be called before any call to logging.getLogger() +# which happens at import of most modules. +_log.init_logging() + + +def main(args: (Optional[List[str]]) = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..90ae3b0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc new file mode 100644 index 0000000..f8e9680 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/build_env.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..299ca18 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc new file mode 100644 index 0000000..d8fc676 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/configuration.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..5d059a1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000..e83280d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/main.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc new file mode 100644 index 0000000..f676b31 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/pyproject.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc new file mode 100644 index 0000000..ea22730 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc new file mode 100644 index 0000000..c462d13 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/build_env.py b/myenv/lib/python3.12/site-packages/pip/_internal/build_env.py new file mode 100644 index 0000000..4f704a3 --- /dev/null +++ 
b/myenv/lib/python3.12/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,311 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import pathlib
+import site
+import sys
+import textwrap
+from collections import OrderedDict
+from types import TracebackType
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
+
+from pip._vendor.certifi import where
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.version import Version
+
+from pip import __file__ as pip_location
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.locations import get_platlib, get_purelib, get_scheme
+from pip._internal.metadata import get_default_environment, get_environment
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+
+if TYPE_CHECKING:
+    from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
+    return (a, b) if a != b else (a,)
+
+
+class _Prefix:
+    def __init__(self, path: str) -> None:
+        self.path = path
+        self.setup = False
+        scheme = get_scheme("", prefix=path)
+        self.bin_dir = scheme.scripts
+        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
+
+
+def get_runnable_pip() -> str:
+    """Get a file to pass to a Python executable, to run the currently-running pip.
+
+    This is used to run a pip subprocess, for installing requirements into the build
+    environment.
+    """
+    source = pathlib.Path(pip_location).resolve().parent
+
+    if not source.is_dir():
+        # This would happen if someone is using pip from inside a zip file. In that
+        # case, we can use that directly.
+        return str(source)
+
+    return os.fsdecode(source / "__pip-runner__.py")
+
+
+def _get_system_sitepackages() -> Set[str]:
+    """Get system site packages
+
+    Usually from site.getsitepackages,
+    but fall back on `get_purelib()/get_platlib()` if unavailable
+    (e.g. in a virtualenv created by virtualenv<20)
+
+    Returns a normalized set of strings.
+    """
+    if hasattr(site, "getsitepackages"):
+        system_sites = site.getsitepackages()
+    else:
+        # virtualenv < 20 overwrites site.py without getsitepackages
+        # fall back on get_purelib/get_platlib.
+        # This is known to miss things, but shouldn't in the cases
+        # where getsitepackages() has been removed (inside a virtualenv)
+        system_sites = [get_purelib(), get_platlib()]
+    return {os.path.normcase(path) for path in system_sites}
+
+
+class BuildEnvironment:
+    """Creates and manages an isolated environment to install build deps"""
+
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
+
+        self._prefixes = OrderedDict(
+            (name, _Prefix(os.path.join(temp_dir.path, name)))
+            for name in ("normal", "overlay")
+        )
+
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
+        for prefix in reversed(list(self._prefixes.values())):
+            self._bin_dirs.append(prefix.bin_dir)
+            self._lib_dirs.extend(prefix.lib_dirs)
+
+        # Customize site to:
+        # - ensure .pth files are honored
+        # - prevent access to system site packages
+        system_sites = _get_system_sitepackages()
+
+        self._site_dir = os.path.join(temp_dir.path, "site")
+        if not os.path.exists(self._site_dir):
+            os.mkdir(self._site_dir)
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
+                    import os, site, sys
+
+                    # First, drop system-sites related paths.
+                    original_sys_path = sys.path[:]
+                    known_paths = set()
+                    for path in {system_sites!r}:
+                        site.addsitedir(path, known_paths=known_paths)
+                    system_paths = set(
+                        os.path.normcase(path)
+                        for path in sys.path[len(original_sys_path):]
+                    )
+                    original_sys_path = [
+                        path for path in original_sys_path
+                        if os.path.normcase(path) not in system_paths
+                    ]
+                    sys.path = original_sys_path
+
+                    # Second, add lib directories,
+                    # ensuring .pth files are processed.
+                    for path in {lib_dirs!r}:
+                        assert not path in sys.path
+                        site.addsitedir(path)
+                    """
+                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+            )
+
+    def __enter__(self) -> None:
+        self._save_env = {
+            name: os.environ.get(name, None)
+            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
+        }
+
+        path = self._bin_dirs[:]
+        old_path = self._save_env["PATH"]
+        if old_path:
+            path.extend(old_path.split(os.pathsep))
+
+        pythonpath = [self._site_dir]
+
+        os.environ.update(
+            {
+                "PATH": os.pathsep.join(path),
+                "PYTHONNOUSERSITE": "1",
+                "PYTHONPATH": os.pathsep.join(pythonpath),
+            }
+        )
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        for varname, old_value in self._save_env.items():
+            if old_value is None:
+                os.environ.pop(varname, None)
+            else:
+                os.environ[varname] = old_value
+
+    def check_requirements(
+        self, reqs: Iterable[str]
+    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
+        """Return 2 sets:
+        - conflicting requirements: set of (installed, wanted) reqs tuples
+        - missing requirements: set of reqs
+        """
+        missing = set()
+        conflicting = set()
+        if reqs:
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
+            for req_str in reqs:
+                req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+ if req.marker is not None and not req.marker.evaluate({"extra": ""}): + continue + dist = env.get_distribution(req.name) + if not dist: + missing.add(req_str) + continue + if isinstance(dist.version, Version): + installed_req_str = f"{req.name}=={dist.version}" + else: + installed_req_str = f"{req.name}==={dist.version}" + if not req.specifier.contains(dist.version, prereleases=True): + conflicting.add((installed_req_str, req_str)) + # FIXME: Consider direct URL? + return conflicting, missing + + def install_requirements( + self, + finder: "PackageFinder", + requirements: Iterable[str], + prefix_as_string: str, + *, + kind: str, + ) -> None: + prefix = self._prefixes[prefix_as_string] + assert not prefix.setup + prefix.setup = True + if not requirements: + return + self._install_requirements( + get_runnable_pip(), + finder, + requirements, + prefix, + kind=kind, + ) + + @staticmethod + def _install_requirements( + pip_runnable: str, + finder: "PackageFinder", + requirements: Iterable[str], + prefix: _Prefix, + *, + kind: str, + ) -> None: + args: List[str] = [ + sys.executable, + pip_runnable, + "install", + "--ignore-installed", + "--no-user", + "--prefix", + prefix.path, + "--no-warn-script-location", + ] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append("-v") + for format_control in ("no_binary", "only_binary"): + formats = getattr(finder.format_control, format_control) + args.extend( + ( + "--" + format_control.replace("_", "-"), + ",".join(sorted(formats or {":none:"})), + ) + ) + + index_urls = finder.index_urls + if index_urls: + args.extend(["-i", index_urls[0]]) + for extra_index in index_urls[1:]: + args.extend(["--extra-index-url", extra_index]) + else: + args.append("--no-index") + for link in finder.find_links: + args.extend(["--find-links", link]) + + for host in finder.trusted_hosts: + args.extend(["--trusted-host", host]) + if finder.allow_all_prereleases: + args.append("--pre") + if finder.prefer_binary: + args.append("--prefer-binary") + args.append("--") + args.extend(requirements) + extra_environ = {"_PIP_STANDALONE_CERT": where()} + with open_spinner(f"Installing {kind}") as spinner: + call_subprocess( + args, + command_desc=f"pip subprocess to install {kind}", + spinner=spinner, + extra_environ=extra_environ, + ) + + +class NoOpBuildEnvironment(BuildEnvironment): + """A no-op drop-in replacement for BuildEnvironment""" + + def __init__(self) -> None: + pass + + def __enter__(self) -> None: + pass + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def cleanup(self) -> None: + pass + + def install_requirements( + self, + finder: "PackageFinder", + requirements: Iterable[str], + prefix_as_string: str, + *, + kind: str, + ) -> None: + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cache.py b/myenv/lib/python3.12/site-packages/pip/_internal/cache.py new file mode 100644 index 0000000..f45ac23 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cache.py @@ -0,0 +1,290 @@ +"""Cache Management +""" + +import hashlib +import json +import logging +import os +from pathlib import Path +from typing import Any, Dict, List, Optional + +from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.models.direct_url import DirectUrl +from 
pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+ORIGIN_JSON_NAME = "origin.json"
+
+
+def _hash_dict(d: Dict[str, str]) -> str:
+    """Return a stable sha224 of a dictionary."""
+    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+    return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache:
+    """An abstract class - provides cache directories for data from links
+
+    :param cache_dir: The root of the cache.
+    """
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__()
+        assert not cache_dir or os.path.isabs(cache_dir)
+        self.cache_dir = cache_dir or None
+
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of the path that must be os.path.joined with cache_dir"""
+
+        # We want to generate a URL to use as our cache key; we don't want to
+        # just re-use the URL because it might have other items in the fragment
+        # and we don't care about those.
+        key_parts = {"url": link.url_without_fragment}
+        if link.hash_name is not None and link.hash is not None:
+            key_parts[link.hash_name] = link.hash
+        if link.subdirectory_fragment:
+            key_parts["subdirectory"] = link.subdirectory_fragment
+
+        # Include interpreter name, major and minor version in cache key
+        # to cope with ill-behaved sdists that build a different wheel
+        # depending on the python version their setup.py is being run on,
+        # and don't encode the difference in compatibility tags.
+        # https://github.com/pypa/pip/issues/7296
+        key_parts["interpreter_name"] = interpreter_name()
+        key_parts["interpreter_version"] = interpreter_version()
+
+        # Encode our key URL with sha224; we'll use this because it has similar
+        # security properties to sha256, but with a shorter total output (and
+        # thus less secure). However, the differences don't make a lot of
+        # difference for our use case here.
+        hashed = _hash_dict(key_parts)
+
+        # We want to nest the directories some to prevent having a ton of top
+        # level directories where we might run out of sub directories on some
+        # FS.
+        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+        return parts
+
+    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+        can_not_cache = not self.cache_dir or not canonical_package_name or not link
+        if can_not_cache:
+            return []
+
+        path = self.get_path_for_link(link)
+        if os.path.isdir(path):
+            return [(candidate, path) for candidate in os.listdir(path)]
+        return []
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached items in for link."""
+        raise NotImplementedError()
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        """Returns a link to a cached item if it exists, otherwise returns the
+        passed link.
+        """
+        raise NotImplementedError()
+
+
+class SimpleWheelCache(Cache):
+    """A cache of wheels for future installs."""
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__(cache_dir)
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached wheels for link
+
+        Because there are M wheels for any one sdist, we provide a directory
+        to cache them in, and then consult that directory when looking up
+        cache hits.
+
+        We only insert things into the cache if they have plausible version
+        numbers, so that we don't contaminate the cache with things that were
+        not unique. E.g. ./package might have dozens of installs done for it
+        and build a version of 0.0...and if we built and cached a wheel, we'd
+        end up using the same wheel even if the source has been edited.
+
+        :param link: The link of the sdist for which this will cache wheels.
+        """
+        parts = self._get_cache_path_parts(link)
+        assert self.cache_dir
+        # Store wheels within the root cache_dir
+        return os.path.join(self.cache_dir, "wheels", *parts)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        candidates = []
+
+        if not package_name:
+            return link
+
+        canonical_package_name = canonicalize_name(package_name)
+        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
+            try:
+                wheel = Wheel(wheel_name)
+            except InvalidWheelFilename:
+                continue
+            if canonicalize_name(wheel.name) != canonical_package_name:
+                logger.debug(
+                    "Ignoring cached wheel %s for %s as it "
+                    "does not match the expected distribution name %s.",
+                    wheel_name,
+                    link,
+                    package_name,
+                )
+                continue
+            if not wheel.supported(supported_tags):
+                # Built for a different python/arch/etc
+                continue
+            candidates.append(
+                (
+                    wheel.support_index_min(supported_tags),
+                    wheel_name,
+                    wheel_dir,
+                )
+            )
+
+        if not candidates:
+            return link
+
+        _, wheel_name, wheel_dir = min(candidates)
+        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
+
+
+class EphemWheelCache(SimpleWheelCache):
+    """A SimpleWheelCache that creates its own temporary cache directory"""
+
+    def __init__(self) -> None:
+        self._temp_dir = TempDirectory(
+            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
+            globally_managed=True,
+        )
+
+        super().__init__(self._temp_dir.path)
+
+
+class CacheEntry:
+    def __init__(
+        self,
+        link: Link,
+        persistent: bool,
+    ):
+        self.link = link
+        self.persistent = persistent
+        self.origin: Optional[DirectUrl] = None
+        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
+        if origin_direct_url_path.exists():
+            try:
+                self.origin = DirectUrl.from_json(
+                    origin_direct_url_path.read_text(encoding="utf-8")
+                )
+            except Exception as e:
+                logger.warning(
+                    "Ignoring invalid cache entry origin file %s for %s (%s)",
+                    origin_direct_url_path,
+                    link.filename,
+                    e,
+                )
+
+
+class WheelCache(Cache):
+    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+    This Cache allows for graceful degradation, using the ephem wheel cache
+    when a certain link is not found in the simple wheel cache first.
+    """
+
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__(cache_dir)
+        self._wheel_cache = SimpleWheelCache(cache_dir)
+        self._ephem_cache = EphemWheelCache()
+
+    def get_path_for_link(self, link: Link) -> str:
+        return self._wheel_cache.get_path_for_link(link)
+
+    def get_ephem_path_for_link(self, link: Link) -> str:
+        return self._ephem_cache.get_path_for_link(link)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
+        if cache_entry is None:
+            return link
+        return cache_entry.link
+
+    def get_cache_entry(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Optional[CacheEntry]:
+        """Returns a CacheEntry with a link to a cached item if it exists or
+        None.
The cache entry indicates if the item was found in the persistent + or ephemeral cache. + """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None + + @staticmethod + def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None: + origin_path = Path(cache_dir) / ORIGIN_JSON_NAME + if origin_path.exists(): + try: + origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8")) + except Exception as e: + logger.warning( + "Could not read origin file %s in cache entry (%s). " + "Will attempt to overwrite it.", + origin_path, + e, + ) + else: + # TODO: use DirectUrl.equivalent when + # https://github.com/pypa/pip/pull/10564 is merged. + if origin.url != download_info.url: + logger.warning( + "Origin URL %s in cache entry %s does not match download URL " + "%s. This is likely a pip bug or a cache corruption issue. " + "Will overwrite it with the new value.", + origin.url, + cache_dir, + download_info.url, + ) + origin_path.write_text(download_info.to_json(), encoding="utf-8") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py new file mode 100644 index 0000000..e589bb9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__init__.py @@ -0,0 +1,4 @@ +"""Subpackage containing all of pip's command line interface related code +""" + +# This file intentionally does not import submodules diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8218f8d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc new file mode 100644 index 0000000..94e9b54 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc new file mode 100644 index 0000000..2123353 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc new file mode 100644 index 0000000..5bed68e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc new file mode 100644 index 0000000..b03e042 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000..9cdce03 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc new file mode 100644 index 0000000..04ac395 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc new file mode 100644 index 0000000..6016910 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/parser.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc new file mode 100644 index 0000000..6506705 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc new file mode 100644 index 0000000..58fffe3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc new file mode 100644 index 0000000..55b89e1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc new file mode 100644 index 0000000..a5a5c68 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py new file mode 100644 index 0000000..e5950b9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/autocompletion.py @@ -0,0 +1,172 @@ +"""Logic that powers autocompletion installed by ``pip completion``. +""" + +import optparse +import os +import sys +from itertools import chain +from typing import Any, Iterable, List, Optional + +from pip._internal.cli.main_parser import create_main_parser +from pip._internal.commands import commands_dict, create_command +from pip._internal.metadata import get_default_environment + + +def autocomplete() -> None: + """Entry Point for completion of main and subcommand options.""" + # Don't complete if user hasn't sourced bash_completion file. 
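+ # COMP_WORDS and COMP_CWORD mirror bash's completion variables; the + # completion script emitted by ``pip completion --bash`` exports them + # along with PIP_AUTO_COMPLETE=1. E.g. for ``pip inst<TAB>`` a shell + # would typically pass COMP_WORDS="pip inst" and COMP_CWORD=1 + # (illustrative values only).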
+ if "PIP_AUTO_COMPLETE" not in os.environ: + return + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) + try: + current = cwords[cword - 1] + except IndexError: + current = "" + + parser = create_main_parser() + subcommands = list(commands_dict) + options = [] + + # subcommand + subcommand_name: Optional[str] = None + for word in cwords: + if word in subcommands: + subcommand_name = word + break + # subcommand options + if subcommand_name is not None: + # special case: 'help' subcommand has no options + if subcommand_name == "help": + sys.exit(1) + # special case: list locally installed dists for show and uninstall + should_list_installed = not current.startswith("-") and subcommand_name in [ + "show", + "uninstall", + ] + if should_list_installed: + env = get_default_environment() + lc = current.lower() + installed = [ + dist.canonical_name + for dist in env.iter_installed_distributions(local_only=True) + if dist.canonical_name.startswith(lc) + and dist.canonical_name not in cwords[1:] + ] + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + should_list_installables = ( + not current.startswith("-") and subcommand_name == "install" + ) + if should_list_installables: + for path in auto_complete_paths(current, "path"): + print(path) + sys.exit(1) + + subcommand = create_command(subcommand_name) + + for opt in subcommand.parser.option_list_all: + if opt.help != optparse.SUPPRESS_HELP: + options += [ + (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts + ] + + # filter out previously specified options from available options + prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, + cword, + subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ````, ```` or ```` + if completion_type: + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1] and option[0][:2] == "--": + opt_label += "=" + print(opt_label) + else: + # show main parser options only when necessary + + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + flattened_opts = chain.from_iterable(opts) + if current.startswith("-"): + for opt in flattened_opts: + if opt.help != optparse.SUPPRESS_HELP: + subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, flattened_opts) + if completion_type: + subcommands = list(auto_complete_paths(current, completion_type)) + + print(" ".join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def get_path_completion_type( + cwords: List[str], cword: int, opts: Iterable[Any] +) -> Optional[str]: + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, 
``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith("-"): + return None + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split("/"): + if cwords[cword - 2].split("=")[0] == o: + if not opt.metavar or any( + x in ("path", "file", "dir") for x in opt.metavar.split("/") + ): + return opt.metavar + return None + + +def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]: + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``. + + :param current: The word to be completed + :param completion_type: path completion type (``file``, ``path`` or ``dir``) + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = ( + x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename) + ) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not ``<dir>`` after option + # complete directories when there is ``<file>``, ``<path>`` or + # ``<dir>`` after option + if completion_type != "dir" and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, "") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py new file mode 100644 index 0000000..db9d5cc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py @@ -0,0 +1,236 @@ +"""Base Command class, and related routines""" + +import functools +import logging +import logging.config +import optparse +import os +import sys +import traceback +from optparse import Values +from typing import Any, Callable, List, Optional, Tuple + +from pip._vendor.rich import traceback as rich_traceback + +from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.cli.status_codes import ( + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.exceptions import ( + BadCommand, + CommandError, + DiagnosticPipError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + UninstallationError, +) +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry +from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = ["Command"] + +logger = logging.getLogger(__name__) + + +class Command(CommandContextMixIn): + usage: str = "" + ignore_require_venv: bool = False + + def __init__(self, name: str, summary: str, isolated: bool = False) -> None: + super().__init__() + + self.name = name + self.summary = summary + self.parser = ConfigOptionParser( + usage=self.usage, +
prog=f"{get_prog()} {name}", + formatter=UpdatingDefaultsHelpFormatter(), + add_help_option=False, + name=name, + description=self.__doc__, + isolated=isolated, + ) + + self.tempdir_registry: Optional[TempDirRegistry] = None + + # Commands should add options to this option group + optgroup_name = f"{self.name.capitalize()} Options" + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + self.add_options() + + def add_options(self) -> None: + pass + + def handle_pip_version_check(self, options: Values) -> None: + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, "no_index") + + def run(self, options: Values, args: List[str]) -> int: + raise NotImplementedError + + def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]: + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args: List[str]) -> int: + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args: List[str]) -> int: + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + always_enabled_features = set(options.features_enabled) & set( + cmdoptions.ALWAYS_ENABLED_FEATURES + ) + if always_enabled_features: + logger.warning( + "The following features are always enabled: %s. ", + ", ".join(sorted(always_enabled_features)), + ) + + # Make sure that the --python argument isn't specified after the + # subcommand. We can tell, because if --python was specified, + # we should only reach this point if we're running in the created + # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment + # variable set. + if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + logger.critical( + "The --python option must be placed before the pip subcommand name" + ) + sys.exit(ERROR) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. 
+ + if options.no_input: + os.environ["PIP_NO_INPUT"] = "1" + + if options.exists_action: + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical("Could not find an activated virtualenv (required).") + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you should " + "use sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + def intercepts_unhandled_exc( + run_func: Callable[..., int] + ) -> Callable[..., int]: + @functools.wraps(run_func) + def exc_logging_wrapper(*args: Any) -> int: + try: + status = run_func(*args) + assert isinstance(status, int) + return status + except DiagnosticPipError as exc: + logger.error("%s", exc, extra={"rich": True}) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except ( + InstallationError, + UninstallationError, + BadCommand, + NetworkConnectionError, + ) as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to + # stderr because stdout no longer works. + print("ERROR: Pipe to stdout was broken", file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BaseException: + logger.critical("Exception:", exc_info=True) + + return UNKNOWN_ERROR + + return exc_logging_wrapper + + try: + if not options.debug_mode: + run = intercepts_unhandled_exc(self.run) + else: + run = self.run + rich_traceback.install(show_locals=True) + return run(options, args) + finally: + self.handle_pip_version_check(options) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 0000000..d643256 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,1074 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import importlib.util +import logging +import os +import textwrap +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values +from textwrap import dedent +from typing import Any, Callable, Dict, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.parser import ConfigOptionParser +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.misc import strtobool + +logger = logging.getLogger(__name__) + + +def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. + """ + msg = f"{option} error: {msg}" + msg = textwrap.fill(" ".join(msg.split())) + parser.error(msg) + + +def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: + option_group.add_option(option()) + return option_group + + +def check_dist_restriction(options: Values, check_target: bool = False) -> None: + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any( + [ + options.python_version, + options.platforms, + options.abis, + options.implementation, + ] + ) + + binary_only = FormatControl(set(), {":all:"}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." 
+ ) + + if check_target: + if not options.dry_run and dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target' or using '--dry-run'" + ) + + +def _path_option_check(option: Option, opt: str, value: str) -> str: + return os.path.expanduser(value) + + +def _package_name_option_check(option: Option, opt: str, value: str) -> str: + return canonicalize_name(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path", "package_name") + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["package_name"] = _package_name_option_check + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_: Callable[..., Option] = partial( + Option, + "-h", + "--help", + dest="help", + action="help", + help="Show help.", +) + +debug_mode: Callable[..., Option] = partial( + Option, + "--debug", + dest="debug_mode", + action="store_true", + default=False, + help=( + "Let unhandled exceptions propagate outside the main subroutine, " + "instead of logging them to stderr." + ), +) + +isolated_mode: Callable[..., Option] = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv: Callable[..., Option] = partial( + Option, + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", + default=False, + help=( + "Allow pip to only run in a virtual environment; " + "exit with an error otherwise." + ), +) + +override_externally_managed: Callable[..., Option] = partial( + Option, + "--break-system-packages", + dest="override_externally_managed", + action="store_true", + help="Allow pip to modify an EXTERNALLY-MANAGED Python installation", +) + +python: Callable[..., Option] = partial( + Option, + "--python", + dest="python", + help="Run pip with the specified Python interpreter.", +) + +verbose: Callable[..., Option] = partial( + Option, + "-v", + "--verbose", + dest="verbose", + action="count", + default=0, + help="Give more output. Option is additive, and can be used up to 3 times.", +) + +no_color: Callable[..., Option] = partial( + Option, + "--no-color", + dest="no_color", + action="store_true", + default=False, + help="Suppress colored output.", +) + +version: Callable[..., Option] = partial( + Option, + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", +) + +quiet: Callable[..., Option] = partial( + Option, + "-q", + "--quiet", + dest="quiet", + action="count", + default=0, + help=( + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." 
+ ), +) + +progress_bar: Callable[..., Option] = partial( + Option, + "--progress-bar", + dest="progress_bar", + type="choice", + choices=["on", "off"], + default="on", + help="Specify whether the progress bar should be used [on, off] (default: on)", +) + +log: Callable[..., Option] = partial( + PipOption, + "--log", + "--log-file", + "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log.", +) + +no_input: Callable[..., Option] = partial( + Option, + # Don't ask for input + "--no-input", + dest="no_input", + action="store_true", + default=False, + help="Disable prompting for input.", +) + +keyring_provider: Callable[..., Option] = partial( + Option, + "--keyring-provider", + dest="keyring_provider", + choices=["auto", "disabled", "import", "subprocess"], + default="auto", + help=( + "Enable the credential lookup via the keyring library if user input is allowed." + " Specify which mechanism to use [auto, disabled, import, subprocess]." + " (default: auto)" + ), +) + +proxy: Callable[..., Option] = partial( + Option, + "--proxy", + dest="proxy", + type="str", + default="", + help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.", +) + +retries: Callable[..., Option] = partial( + Option, + "--retries", + dest="retries", + type="int", + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) + +timeout: Callable[..., Option] = partial( + Option, + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", + default=15, + help="Set the socket timeout (default %default seconds).", +) + + +def exists_action() -> Option: + return Option( + # Option when a path already exists + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], + default=[], + action="append", + metavar="action", + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert: Callable[..., Option] = partial( + PipOption, + "--cert", + dest="cert", + type="path", + metavar="path", + help=( + "Path to PEM-encoded CA certificate bundle. " + "If provided, overrides the default. " + "See 'SSL Certificate Verification' in pip documentation " + "for more information." + ), +) + +client_cert: Callable[..., Option] = partial( + PipOption, + "--client-cert", + dest="client_cert", + type="path", + default=None, + metavar="path", + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) + +index_url: Callable[..., Option] = partial( + Option, + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) + + +def extra_index_url() -> Option: + return Option( + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url.
Should follow the same rules as " + "--index-url.", + ) + + +no_index: Callable[..., Option] = partial( + Option, + "--no-index", + dest="no_index", + action="store_true", + default=False, + help="Ignore package index (only looking at --find-links URLs instead).", +) + + +def find_links() -> Option: + return Option( + "-f", + "--find-links", + dest="find_links", + action="append", + default=[], + metavar="url", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", + ) + + +def trusted_host() -> Option: + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid (or any) HTTPS.", + ) + + +def constraints() -> Option: + return Option( + "-c", + "--constraint", + dest="constraints", + action="append", + default=[], + metavar="file", + help="Constrain versions using the given constraints file. " + "This option can be used multiple times.", + ) + + +def requirements() -> Option: + return Option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help="Install from the given requirements file. " + "This option can be used multiple times.", + ) + + +def editable() -> Option: + return Option( + "-e", + "--editable", + dest="editables", + action="append", + default=[], + metavar="path/url", + help=( + "Install a project in editable mode (i.e. setuptools " + '"develop mode") from a local project path or a VCS url.' + ), + ) + + +def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src: Callable[..., Option] = partial( + PipOption, + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + type="path", + metavar="dir", + default=get_src_prefix(), + action="callback", + callback=_handle_src, + help="Directory to check out editable projects into. " + 'The default in a virtualenv is "<venv path>/src". ' + 'The default for global installs is "<current dir>/src".', +) + + +def _get_format_control(values: Values, option: Option) -> Any: + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.no_binary, + existing.only_binary, + ) + + +def _handle_only_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.only_binary, + existing.no_binary, + ) + + +def no_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", + dest="format_control", + action="callback", + callback=_handle_no_binary, + type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + "the colons), or one or more package names with commas between " + "them (no colons).
Note that some packages are tricky to compile " + "and may fail to install when this option is used on them.", + ) + + +def only_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", + dest="format_control", + action="callback", + callback=_handle_only_binary, + type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + "or more package names with commas between them. Packages " + "without binary distributions will fail to install when this " + "option is used on them.", + ) + + +platforms: Callable[..., Option] = partial( + Option, + "--platform", + dest="platforms", + metavar="platform", + action="append", + default=None, + help=( + "Only use wheels compatible with <platform>. Defaults to the " + "platform of the running system. Use this option multiple times to " + "specify multiple platforms supported by the target interpreter." + ), +) + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]: + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split(".") + if len(parts) > 3: + return ((), "at most three version parts are allowed") + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), "each version part must be an integer") + + return (version_info, None) + + +def _handle_python_version( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """ + Handle a provided --python-version value. + """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = f"invalid --python-version value: {value!r}: {error_msg}" + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version: Callable[..., Option] = partial( + Option, + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", + callback=_handle_python_version, + type="str", + default=None, + help=dedent( + """\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """ + ), +) + + +implementation: Callable[..., Option] = partial( + Option, + "--implementation", + dest="implementation", + metavar="implementation", + default=None, + help=( + "Only use wheels compatible with Python " + "implementation <implementation>, e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels."
+ ), +) + + +abis: Callable[..., Option] = partial( + Option, + "--abi", + dest="abis", + metavar="abi", + action="append", + default=None, + help=( + "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. " + "If not specified, then the current interpreter abi tag is used. " + "Use this option multiple times to specify multiple abis supported " + "by the target interpreter. Generally you will need to specify " + "--implementation, --platform, and --python-version when using this " + "option." + ), +) + + +def add_target_python_options(cmd_opts: OptionGroup) -> None: + cmd_opts.add_option(platforms()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abis()) + + +def make_target_python(options: Values) -> TargetPython: + target_python = TargetPython( + platforms=options.platforms, + py_version_info=options.python_version, + abis=options.abis, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary() -> Option: + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help=( + "Prefer binary packages over source packages, even if the " + "source packages are newer." + ), + ) + + +cache_dir: Callable[..., Option] = partial( + PipOption, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + type="path", + help="Store the cache data in <dir>.", +) + + +def _handle_no_cache_dir( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-cache-dir option. + + This is an optparse.Option callback for the --no-cache-dir option. + """ + # The value argument will be None if --no-cache-dir is passed via the + # command-line, since the option doesn't accept arguments. However, + # the value can be non-None if the option is triggered e.g. by an + # environment variable, like PIP_NO_CACHE_DIR=true. + if value is not None: + # Then parse the string value to get argument error-checking. + try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) + + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() + # converted to 0 (like "false" or "no") caused cache_dir to be disabled + # rather than enabled (logic would say the latter). Thus, we disable + # the cache directory not just on values that parse to True, but (for + # backwards compatibility reasons) also on values that parse to False. + # In other words, always set it to False if the option is provided in + # some (valid) form. + parser.values.cache_dir = False + + +no_cache: Callable[..., Option] = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="callback", + callback=_handle_no_cache_dir, + help="Disable the cache.", +) + +no_deps: Callable[..., Option] = partial( + Option, + "--no-deps", + "--no-dependencies", + dest="ignore_dependencies", + action="store_true", + default=False, + help="Don't install package dependencies.", +) + +ignore_requires_python: Callable[..., Option] = partial( + Option, + "--ignore-requires-python", + dest="ignore_requires_python", + action="store_true", + help="Ignore the Requires-Python information.", +) + +no_build_isolation: Callable[..., Option] = partial( + Option, + "--no-build-isolation", + dest="build_isolation", + action="store_false", + default=True, + help="Disable isolation when building a modern source distribution.
" + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", +) + +check_build_deps: Callable[..., Option] = partial( + Option, + "--check-build-dependencies", + dest="check_build_deps", + action="store_true", + default=False, + help="Check the build dependencies when PEP517 is used.", +) + + +def _handle_no_use_pep517( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # If user doesn't wish to use pep517, we check if setuptools and wheel are installed + # and raise error if it is not. + packages = ("setuptools", "wheel") + if not all(importlib.util.find_spec(package) for package in packages): + msg = ( + f"It is not possible to use --no-use-pep517 " + f"without {' and '.join(packages)} installed." + ) + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + +use_pep517: Any = partial( + Option, + "--use-pep517", + dest="use_pep517", + action="store_true", + default=None, + help="Use PEP 517 for building source distributions " + "(use --no-use-pep517 to force legacy behaviour).", +) + +no_use_pep517: Any = partial( + Option, + "--no-use-pep517", + dest="use_pep517", + action="callback", + callback=_handle_no_use_pep517, + default=None, + help=SUPPRESS_HELP, +) + + +def _handle_config_settings( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + key, sep, val = value.partition("=") + if sep != "=": + parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") + dest = getattr(parser.values, option.dest) + if dest is None: + dest = {} + setattr(parser.values, option.dest, dest) + if key in dest: + if isinstance(dest[key], list): + dest[key].append(val) + else: + dest[key] = [dest[key], val] + else: + dest[key] = val + + +config_settings: Callable[..., Option] = partial( + Option, + "-C", + "--config-settings", + dest="config_settings", + type=str, + action="callback", + callback=_handle_config_settings, + metavar="settings", + help="Configuration settings to be passed to the PEP 517 build backend. " + "Settings take the form KEY=VALUE. 
Use multiple --config-settings options " + "to pass multiple keys to the backend.", +) + +build_options: Callable[..., Option] = partial( + Option, + "--build-option", + dest="build_options", + metavar="options", + action="append", + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", +) + +global_options: Callable[..., Option] = partial( + Option, + "--global-option", + dest="global_options", + action="append", + metavar="options", + help="Extra global options to be supplied to the setup.py " + "call before the install or bdist_wheel command.", +) + +no_clean: Callable[..., Option] = partial( + Option, + "--no-clean", + action="store_true", + default=False, + help="Don't clean up build directories.", +) + +pre: Callable[..., Option] = partial( + Option, + "--pre", + action="store_true", + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) + +disable_pip_version_check: Callable[..., Option] = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=True, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) + +root_user_action: Callable[..., Option] = partial( + Option, + "--root-user-action", + dest="root_user_action", + default="warn", + choices=["warn", "ignore"], + help="Action if pip is run as a root user. By default, a warning message is shown.", +) + + +def _handle_merge_hash( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(":", 1) + except ValueError: + parser.error( + f"Arguments to {opt_str} must be a hash name " + "followed by a value, like --hash=sha256:" + "abcde..." + ) + if algo not in STRONG_HASHES: + parser.error( + "Allowed hash algorithms for {} are {}.".format( + opt_str, ", ".join(STRONG_HASHES) + ) + ) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash: Callable[..., Option] = partial( + Option, + "--hash", + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest="hashes", + action="callback", + callback=_handle_merge_hash, + type="string", + help="Verify that the package's archive matches this " + "hash before installing. Example: --hash=sha256:abcdef...", +) + + +require_hashes: Callable[..., Option] = partial( + Option, + "--require-hashes", + dest="require_hashes", + action="store_true", + default=False, + help="Require a hash to check each requirement against, for " + "repeatable installs. 
This option is implied when any package in a " + "requirements file has a --hash option.", +) + + +list_path: Callable[..., Option] = partial( + PipOption, + "--path", + dest="path", + type="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", +) + + +def check_list_path_option(options: Values) -> None: + if options.path and (options.user or options.local): + raise CommandError("Cannot combine '--path' with '--user' or '--local'") + + +list_exclude: Callable[..., Option] = partial( + PipOption, + "--exclude", + dest="excludes", + action="append", + metavar="package", + type="package_name", + help="Exclude specified package from the output", +) + + +no_python_version_warning: Callable[..., Option] = partial( + Option, + "--no-python-version-warning", + dest="no_python_version_warning", + action="store_true", + default=False, + help="Silence deprecation warnings for upcoming unsupported Pythons.", +) + + +# Features that are now always on. A warning is printed if they are used. +ALWAYS_ENABLED_FEATURES = [ + "no-binary-enable-wheel-cache", # always on since 23.1 +] + +use_new_feature: Callable[..., Option] = partial( + Option, + "--use-feature", + dest="features_enabled", + metavar="feature", + action="append", + default=[], + choices=[ + "fast-deps", + "truststore", + ] + + ALWAYS_ENABLED_FEATURES, + help="Enable new functionality that may be backward incompatible.", +) + +use_deprecated_feature: Callable[..., Option] = partial( + Option, + "--use-deprecated", + dest="deprecated_features_enabled", + metavar="feature", + action="append", + default=[], + choices=[ + "legacy-resolver", + ], + help=("Enable deprecated functionality that will be removed in the future."), +) + + +########## +# groups # +########## + +general_group: Dict[str, Any] = { + "name": "General Options", + "options": [ + help_, + debug_mode, + isolated_mode, + require_virtualenv, + python, + verbose, + version, + quiet, + log, + no_input, + keyring_provider, + proxy, + retries, + timeout, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + use_new_feature, + use_deprecated_feature, + ], +} + +index_group: Dict[str, Any] = { + "name": "Package Index Options", + "options": [ + index_url, + extra_index_url, + no_index, + find_links, + ], +} diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 0000000..139995a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,27 @@ +from contextlib import ExitStack, contextmanager +from typing import ContextManager, Generator, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class CommandContextMixIn: + def __init__(self) -> None: + super().__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self) -> Generator[None, None, None]: + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider: ContextManager[_T]) -> _T: + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/main.py
b/myenv/lib/python3.12/site-packages/pip/_internal/cli/main.py new file mode 100644 index 0000000..7e061f5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,79 @@ +"""Primary application entrypoint. +""" +import locale +import logging +import os +import sys +import warnings +from typing import List, Optional + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. 
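+# +# If the embedding code needs pip's exit status without the process +# exiting, the SystemExit raised by the runpy invocation can be caught +# (a sketch under the same caveats, not a supported API): +# +# import runpy, sys +# sys.argv = ["pip", "--version"] +# try: +# runpy.run_module("pip", run_name="__main__") +# except SystemExit as exc: +# status = exc.code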
+ + +def main(args: Optional[List[str]] = None) -> int: + if args is None: + args = sys.argv[1:] + + # Suppress the pkg_resources deprecation warning + # Note - we use a module of .*pkg_resources to cover + # the normal case (pip._vendor.pkg_resources) and the + # devendored case (a bare pkg_resources) + warnings.filterwarnings( + action="ignore", category=DeprecationWarning, module=".*pkg_resources" + ) + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error as e: + # setlocale can apparently crash if locales are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 0000000..5ade356 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,134 @@ +"""A single place for constructing and exposing the main parser +""" + +import os +import subprocess +import sys +from typing import List, Optional, Tuple + +from pip._internal.build_env import get_runnable_pip +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser() -> ConfigOptionParser: + """Creates and returns the main parser for pip's CLI""" + + parser = ConfigOptionParser( + usage="\n%prog <command> [options]", + add_help_option=False, + formatter=UpdatingDefaultsHelpFormatter(), + name="global", + prog=get_prog(), + ) + parser.disable_interspersed_args() + + parser.version = get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [""] + [ + f"{name:27} {command_info.summary}" + for name, command_info in commands_dict.items() + ] + parser.description = "\n".join(description) + + return parser + + +def identify_python_interpreter(python: str) -> Optional[str]: + # If the named file exists, use it. + # If it's a directory, assume it's a virtual environment and + # look for the environment's Python executable. + if os.path.exists(python): + if os.path.isdir(python): + # bin/python for Unix, Scripts/python.exe for Windows + # Try both in case of odd cases like cygwin.
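+            # E.g. (illustrative): given "/home/user/.venvs/demo", this loop + # would return "/home/user/.venvs/demo/bin/python" on POSIX systems.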
+ for exe in ("bin/python", "Scripts/python.exe"): + py = os.path.join(python, exe) + if os.path.exists(py): + return py + else: + return python + + # Could not find the interpreter specified + return None + + +def parse_command(args: List[str]) -> Tuple[str, List[str]]: + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout=5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --python + if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + # Re-invoke pip using the specified Python interpreter + interpreter = identify_python_interpreter(general_options.python) + if interpreter is None: + raise CommandError( + f"Could not locate Python interpreter {general_options.python}" + ) + + pip_cmd = [ + interpreter, + get_runnable_pip(), + ] + pip_cmd.extend(args) + + # Set a flag so the child doesn't re-invoke itself, causing + # an infinite loop. + os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" + returncode = 0 + try: + proc = subprocess.run(pip_cmd) + returncode = proc.returncode + except (subprocess.SubprocessError, OSError) as exc: + raise CommandError(f"Failed to run pip under {interpreter}: {exc}") + sys.exit(returncode) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == "help" and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/parser.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 0000000..ae554b2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,294 @@ +"""Base option parser setup""" + +import logging +import optparse +import shutil +import sys +import textwrap +from contextlib import suppress +from typing import Any, Dict, Generator, List, Tuple + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.misc import redact_auth_from_url, strtobool + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # help position must be aligned with __init__.parseopts.description + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = shutil.get_terminal_size()[0] - 2 + super().__init__(*args, **kwargs) + + def format_option_strings(self, option: optparse.Option) -> str: + return self._format_option_strings(option) + + def _format_option_strings( + self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", " + ) -> str: + """ + Return a
+
+        :param option: tuple of (short opt, long opt), e.g.: ('-f', '--format')
+        :param mvarfmt: metavar format string
+        :param optsep: separator
+        """
+        opts = []
+
+        if option._short_opts:
+            opts.append(option._short_opts[0])
+        if option._long_opts:
+            opts.append(option._long_opts[0])
+        if len(opts) > 1:
+            opts.insert(1, optsep)
+
+        if option.takes_value():
+            assert option.dest is not None
+            metavar = option.metavar or option.dest.lower()
+            opts.append(mvarfmt.format(metavar.lower()))
+
+        return "".join(opts)
+
+    def format_heading(self, heading: str) -> str:
+        if heading == "Options":
+            return ""
+        return heading + ":\n"
+
+    def format_usage(self, usage: str) -> str:
+        """
+        Ensure there is only one newline between usage and the first heading
+        if there is no description.
+        """
+        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
+        return msg
+
+    def format_description(self, description: str) -> str:
+        # leave full control over description to us
+        if description:
+            if hasattr(self.parser, "main"):
+                label = "Commands"
+            else:
+                label = "Description"
+            # some doc strings have initial newlines, some don't
+            description = description.lstrip("\n")
+            # some doc strings have final newlines and spaces, some don't
+            description = description.rstrip()
+            # dedent, then reindent
+            description = self.indent_lines(textwrap.dedent(description), "  ")
+            description = f"{label}:\n{description}\n"
+            return description
+        else:
+            return ""
+
+    def format_epilog(self, epilog: str) -> str:
+        # leave full control over epilog to us
+        if epilog:
+            return epilog
+        else:
+            return ""
+
+    def indent_lines(self, text: str, indent: str) -> str:
+        new_lines = [indent + line for line in text.split("\n")]
+        return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+    """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+    them to show up correctly in the help listing.
+
+    Also redacts auth from URL-type options.
+    """
+
+    def expand_default(self, option: optparse.Option) -> str:
+        default_values = None
+        if self.parser is not None:
+            assert isinstance(self.parser, ConfigOptionParser)
+            self.parser._update_defaults(self.parser.defaults)
+            assert option.dest is not None
+            default_values = self.parser.defaults.get(option.dest)
+        help_text = super().expand_default(option)
+
+        if default_values and option.metavar == "URL":
+            if isinstance(default_values, str):
+                default_values = [default_values]
+
+            # If it's not a list, we should abort and just return the help text
+            if not isinstance(default_values, list):
+                default_values = []
+
+            for val in default_values:
+                help_text = help_text.replace(val, redact_auth_from_url(val))
+
+        return help_text
+
+
+class CustomOptionParser(optparse.OptionParser):
+    def insert_option_group(
+        self, idx: int, *args: Any, **kwargs: Any
+    ) -> optparse.OptionGroup:
+        """Insert an OptionGroup at a given position."""
+        group = self.add_option_group(*args, **kwargs)
+
+        self.option_groups.pop()
+        self.option_groups.insert(idx, group)
+
+        return group
+
+    @property
+    def option_list_all(self) -> List[optparse.Option]:
+        """Get a list of all options, including those in option groups."""
+        res = self.option_list[:]
+        for i in self.option_groups:
+            res.extend(i.option_list)
+
+        return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+    """Custom option parser which updates its defaults by checking the
+    configuration files and environment variables"""
+
+    def __init__(
+        self,
+        *args: Any,
+        name: str,
+        isolated: bool = False,
+        **kwargs: Any,
+    ) -> None:
+        self.name = name
+        self.config = Configuration(isolated)
+
+        assert self.name
+        super().__init__(*args, **kwargs)
+
+    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
+        try:
+            return option.check_value(key, val)
+        except optparse.OptionValueError as exc:
+            print(f"An error occurred during configuration: {exc}")
+            sys.exit(3)
+
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
+        # Configuration gives keys in an unordered manner. Order them.
+        override_order = ["global", self.name, ":env:"]
+
+        # Pool the options into different groups
+        section_items: Dict[str, List[Tuple[str, Any]]] = {
+            name: [] for name in override_order
+        }
+        for section_key, val in self.config.items():
+            # ignore empty values
+            if not val:
+                logger.debug(
+                    "Ignoring configuration key '%s' as its value is empty.",
+                    section_key,
+                )
+                continue
+
+            section, key = section_key.split(".", 1)
+            if section in override_order:
+                section_items[section].append((key, val))
+
+        # Yield each group in their override order
+        for section in override_order:
+            for key, val in section_items[section]:
+                yield key, val
+
+    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
+        """Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists)."""
+
+        # Accumulate complex default state.
+        self.values = optparse.Values(self.defaults)
+        late_eval = set()
+        # Then set the options with those values
+        for key, val in self._get_ordered_configuration_items():
+            # '--' because configuration supports only long names
+            option = self.get_option("--" + key)
+
+            # Ignore options not present in this parser. E.g. non-globals put
+            # in [global] by users that want them to apply to all applicable
+            # commands.
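+            # e.g. an "index-url = https://example.org/simple" entry under
+            # [global] arrives here as key="index-url" and is looked up as
+            # the "--index-url" option (URL illustrative).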
+ if option is None: + continue + + assert option.dest is not None + + if option.action in ("store_true", "store_false"): + try: + val = strtobool(val) + except ValueError: + self.error( + f"{val} is not a valid value for {key} option, " + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead." + ) + elif option.action == "count": + with suppress(ValueError): + val = strtobool(val) + with suppress(ValueError): + val = int(val) + if not isinstance(val, int) or val < 0: + self.error( + f"{val} is not a valid value for {key} option, " + "please instead specify either a non-negative integer " + "or a boolean value like yes/no or false/true " + "which is equivalent to 1/0." + ) + elif option.action == "append": + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == "callback": + assert option.callback is not None + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self) -> optparse.Values: + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + assert option.dest is not None + default = defaults.get(option.dest) + if isinstance(default, str): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg: str) -> None: + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 0000000..0ad1403 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,68 @@ +import functools +from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple + +from pip._vendor.rich.progress import ( + BarColumn, + DownloadColumn, + FileSizeColumn, + Progress, + ProgressColumn, + SpinnerColumn, + TextColumn, + TimeElapsedColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) + +from pip._internal.utils.logging import get_indentation + +DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]] + + +def _rich_progress_bar( + iterable: Iterable[bytes], + *, + bar_type: str, + size: int, +) -> Generator[bytes, None, None]: + assert bar_type == "on", "This should only be used in the default mode." + + if not size: + total = float("inf") + columns: Tuple[ProgressColumn, ...] 
= ( + TextColumn("[progress.description]{task.description}"), + SpinnerColumn("line", speed=1.5), + FileSizeColumn(), + TransferSpeedColumn(), + TimeElapsedColumn(), + ) + else: + total = size + columns = ( + TextColumn("[progress.description]{task.description}"), + BarColumn(), + DownloadColumn(), + TransferSpeedColumn(), + TextColumn("eta"), + TimeRemainingColumn(), + ) + + progress = Progress(*columns, refresh_per_second=30) + task_id = progress.add_task(" " * (get_indentation() + 2), total=total) + with progress: + for chunk in iterable: + yield chunk + progress.update(task_id, advance=len(chunk)) + + +def get_download_progress_renderer( + *, bar_type: str, size: Optional[int] = None +) -> DownloadProgressRenderer: + """Get an object that can be used to render the download progress. + + Returns a callable, that takes an iterable to "wrap". + """ + if bar_type == "on": + return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size) + else: + return iter # no-op, when passed an iterator diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 0000000..6f2f79c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,505 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. +""" + +import logging +import os +import sys +from functools import partial +from optparse import Values +from typing import TYPE_CHECKING, Any, List, Optional, Tuple + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.operations.build.build_tracker import BuildTracker +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.resolution.base import BaseResolver +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + tempdir_kinds, +) +from pip._internal.utils.virtualenv import running_under_virtualenv + +if TYPE_CHECKING: + from ssl import SSLContext + +logger = logging.getLogger(__name__) + + +def _create_truststore_ssl_context() -> Optional["SSLContext"]: + if sys.version_info < (3, 10): + raise CommandError("The truststore feature is only available for Python 3.10+") + + try: + import ssl + except ImportError: + logger.warning("Disabling truststore since ssl support is missing") + return None + + try: + from pip._vendor import truststore + except 
ImportError as e: + raise CommandError(f"The truststore feature is unavailable: {e}") + + return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). + """ + + def __init__(self) -> None: + super().__init__() + self._session: Optional[PipSession] = None + + @classmethod + def _get_index_urls(cls, options: Values) -> Optional[List[str]]: + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options: Values) -> PipSession: + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session( + self, + options: Values, + retries: Optional[int] = None, + timeout: Optional[int] = None, + fallback_to_certifi: bool = False, + ) -> PipSession: + cache_dir = options.cache_dir + assert not cache_dir or os.path.isabs(cache_dir) + + if "truststore" in options.features_enabled: + try: + ssl_context = _create_truststore_ssl_context() + except Exception: + if not fallback_to_certifi: + raise + ssl_context = None + else: + ssl_context = None + + session = PipSession( + cache=os.path.join(cache_dir, "http-v2") if cache_dir else None, + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ssl_context=ssl_context, + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = timeout if timeout is not None else options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + session.auth.keyring_provider = options.keyring_provider + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options: Values) -> None: + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, "no_index") + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout), + # This is set to ensure the function does not fail when truststore is + # specified in use-feature but cannot be loaded. 
This usually raises a + # CommandError and shows a nice user-facing error, but this function is not + # called in that try-except block. + fallback_to_certifi=True, + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def warn_if_run_as_root() -> None: + """Output a warning for sudo users on Unix. + + In a virtual environment, sudo pip still writes to virtualenv. + On Windows, users may run pip as Administrator without issues. + This warning only applies to Unix root users outside of virtualenv. + """ + if running_under_virtualenv(): + return + if not hasattr(os, "getuid"): + return + # On Windows, there are no "system managed" Python packages. Installing as + # Administrator via pip is the correct way of updating system environments. + # + # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform + # checks: https://mypy.readthedocs.io/en/stable/common_issues.html + if sys.platform == "win32" or sys.platform == "cygwin": + return + + if os.getuid() != 0: + return + + logger.warning( + "Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager. " + "It is recommended to use a virtual environment instead: " + "https://pip.pypa.io/warnings/venv" + ) + + +def with_cleanup(func: Any) -> Any: + """Decorator for common logic related to managing temporary + directories. + """ + + def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper( + self: RequirementCommand, options: Values, args: List[Any] + ) -> Optional[int]: + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. + configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + def __init__(self, *args: Any, **kw: Any) -> None: + super().__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def determine_resolver_variant(options: Values) -> str: + """Determines which resolver should be used, based on the given options.""" + if "legacy-resolver" in options.deprecated_features_enabled: + return "legacy" + + return "resolvelib" + + @classmethod + def make_requirement_preparer( + cls, + temp_build_dir: TempDirectory, + options: Values, + build_tracker: BuildTracker, + session: PipSession, + finder: PackageFinder, + use_user_site: bool, + download_dir: Optional[str] = None, + verbosity: int = 0, + ) -> RequirementPreparer: + """ + Create a RequirementPreparer instance for the given parameters. + """ + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + legacy_resolver = False + + resolver_variant = cls.determine_resolver_variant(options) + if resolver_variant == "resolvelib": + lazy_wheel = "fast-deps" in options.features_enabled + if lazy_wheel: + logger.warning( + "pip is using lazily downloaded wheels using HTTP " + "range requests to obtain dependency information. 
" + "This experimental feature is enabled through " + "--use-feature=fast-deps and it is not ready for " + "production." + ) + else: + legacy_resolver = True + lazy_wheel = False + if "fast-deps" in options.features_enabled: + logger.warning( + "fast-deps has no effect when used with the legacy resolver." + ) + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + build_isolation=options.build_isolation, + check_build_deps=options.check_build_deps, + build_tracker=build_tracker, + session=session, + progress_bar=options.progress_bar, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + lazy_wheel=lazy_wheel, + verbosity=verbosity, + legacy_resolver=legacy_resolver, + ) + + @classmethod + def make_resolver( + cls, + preparer: RequirementPreparer, + finder: PackageFinder, + options: Values, + wheel_cache: Optional[WheelCache] = None, + use_user_site: bool = False, + ignore_installed: bool = True, + ignore_requires_python: bool = False, + force_reinstall: bool = False, + upgrade_strategy: str = "to-satisfy-only", + use_pep517: Optional[bool] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> BaseResolver: + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + resolver_variant = cls.determine_resolver_variant(options) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if resolver_variant == "resolvelib": + import pip._internal.resolution.resolvelib.resolver + + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + import pip._internal.resolution.legacy.resolver + + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args: List[str], + options: Values, + finder: PackageFinder, + session: PipSession, + ) -> List[InstallRequirement]: + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements: List[InstallRequirement] = [] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, + finder=finder, + options=options, + session=session, + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, + comes_from=None, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + config_settings=getattr(options, "config_settings", None), + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + config_settings=getattr(options, "config_settings", None), + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, finder=finder, options=options, session=session + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + config_settings=parsed_req.options.get("config_settings") + if parsed_req.options + else None, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {"name": self.name} + if options.find_links: + raise CommandError( + "You must give at least one requirement to {name} " + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=" ".join(options.find_links)) + ) + ) + else: + raise CommandError( + "You must give at least one requirement to {name} " + '(see "pip help {name}")'.format(**opts) + ) + + return requirements + + @staticmethod + def trace_basic_info(finder: PackageFinder) -> None: + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
+ """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 0000000..cf2b976 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,159 @@ +import contextlib +import itertools +import logging +import sys +import time +from typing import IO, Generator, Optional + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation + +logger = logging.getLogger(__name__) + + +class SpinnerInterface: + def spin(self) -> None: + raise NotImplementedError() + + def finish(self, final_status: str) -> None: + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__( + self, + message: str, + file: Optional[IO[str]] = None, + spin_chars: str = "-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds: float = 0.125, + ): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status: str) -> None: + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
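+# For example, a task logged through this spinner produces lines roughly like
+# (message text illustrative):
+#   Building wheels for collected packages: started
+#   Building wheels for collected packages: still running...
+#   Building wheels for collected packages: finished with status 'done'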
+class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status: str) -> None: + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._update(f"finished with status '{final_status}'") + self._finished = True + + +class RateLimiter: + def __init__(self, min_update_interval_seconds: float) -> None: + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update: float = 0 + + def ready(self) -> bool: + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self) -> None: + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]: + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner: SpinnerInterface = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +HIDE_CURSOR = "\x1b[?25l" +SHOW_CURSOR = "\x1b[?25h" + + +@contextlib.contextmanager +def hidden_cursor(file: IO[str]) -> Generator[None, None, None]: + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py b/myenv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 0000000..5e29502 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 0000000..858a410 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,132 @@ +""" +Package containing all pip commands +""" + +import importlib +from collections import namedtuple +from typing import Any, Dict, Optional + +from pip._internal.cli.base_command import Command + +CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") + +# This dictionary does a bunch of heavy lifting for help output: +# - Enables avoiding additional (costly) imports for presenting `--help`. +# - The ordering matters for help display. +# +# Even though the module path starts with the same "pip._internal.commands" +# prefix, the full path makes testing easier (specifically when modifying +# `commands_dict` in test setup / teardown). +commands_dict: Dict[str, CommandInfo] = { + "install": CommandInfo( + "pip._internal.commands.install", + "InstallCommand", + "Install packages.", + ), + "download": CommandInfo( + "pip._internal.commands.download", + "DownloadCommand", + "Download packages.", + ), + "uninstall": CommandInfo( + "pip._internal.commands.uninstall", + "UninstallCommand", + "Uninstall packages.", + ), + "freeze": CommandInfo( + "pip._internal.commands.freeze", + "FreezeCommand", + "Output installed packages in requirements format.", + ), + "inspect": CommandInfo( + "pip._internal.commands.inspect", + "InspectCommand", + "Inspect the python environment.", + ), + "list": CommandInfo( + "pip._internal.commands.list", + "ListCommand", + "List installed packages.", + ), + "show": CommandInfo( + "pip._internal.commands.show", + "ShowCommand", + "Show information about installed packages.", + ), + "check": CommandInfo( + "pip._internal.commands.check", + "CheckCommand", + "Verify installed packages have compatible dependencies.", + ), + "config": CommandInfo( + "pip._internal.commands.configuration", + "ConfigurationCommand", + "Manage local and global configuration.", + ), + "search": CommandInfo( + "pip._internal.commands.search", + "SearchCommand", + "Search PyPI for packages.", + ), + "cache": CommandInfo( + "pip._internal.commands.cache", + "CacheCommand", + "Inspect and manage pip's wheel cache.", + ), + "index": CommandInfo( + "pip._internal.commands.index", + "IndexCommand", + "Inspect information available from package indexes.", + ), + "wheel": CommandInfo( + "pip._internal.commands.wheel", + "WheelCommand", + "Build wheels from your requirements.", + ), + "hash": CommandInfo( + "pip._internal.commands.hash", + "HashCommand", + "Compute hashes of package archives.", + ), + "completion": CommandInfo( + "pip._internal.commands.completion", + "CompletionCommand", + "A helper command used for command completion.", + ), + "debug": 
CommandInfo(
+        "pip._internal.commands.debug",
+        "DebugCommand",
+        "Show information useful for debugging.",
+    ),
+    "help": CommandInfo(
+        "pip._internal.commands.help",
+        "HelpCommand",
+        "Show help for commands.",
+    ),
+}
+
+
+def create_command(name: str, **kwargs: Any) -> Command:
+    """
+    Create an instance of the Command class with the given name.
+    """
+    module_path, class_name, summary = commands_dict[name]
+    module = importlib.import_module(module_path)
+    command_class = getattr(module, class_name)
+    command = command_class(name=name, summary=summary, **kwargs)
+
+    return command
+
+
+def get_similar_commands(name: str) -> Optional[str]:
+    """Command name auto-correct."""
+    from difflib import get_close_matches
+
+    name = name.lower()
+
+    close_commands = get_close_matches(name, commands_dict.keys())
+
+    if close_commands:
+        return close_commands[0]
+    else:
+        return None
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/cache.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
new file mode 100644
index 0000000..3283361
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
@@ -0,0 +1,225 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Any, List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
+from pip._internal.utils.logging import getLogger
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+    """
+    Inspect and manage pip's wheel cache.
+
+    Subcommands:
+
+    - dir: Show the cache directory.
+    - info: Show information about the cache.
+    - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more packages from the cache.
+    - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog dir
+        %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+        %prog purge
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="human",
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "dir": self.get_cache_dir,
+            "info": self.get_cache_info,
+            "list": self.list_cache_items,
+            "remove": self.remove_cache_items,
+            "purge": self.purge_cache,
+        }
+
+        if not options.cache_dir:
+            logger.error("pip cache commands can not function since cache is disabled.")
+            return ERROR
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
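+        # e.g. "pip cache remove numpy" reaches this point with action="remove"
+        # and dispatches to self.remove_cache_items(options, ["numpy"]).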
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + logger.info(options.cache_dir) + + def get_cache_info(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + num_http_files = len(self._find_http_files(options)) + num_packages = len(self._find_wheels(options, "*")) + + http_cache_location = self._cache_dir(options, "http-v2") + old_http_cache_location = self._cache_dir(options, "http") + wheels_cache_location = self._cache_dir(options, "wheels") + http_cache_size = filesystem.format_size( + filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location) + ) + wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) + + message = ( + textwrap.dedent( + """ + Package index page cache location (pip v23.3+): {http_cache_location} + Package index page cache location (older pips): {old_http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} + Locally built wheels location: {wheels_cache_location} + Locally built wheels size: {wheels_cache_size} + Number of locally built wheels: {package_count} + """ # noqa: E501 + ) + .format( + http_cache_location=http_cache_location, + old_http_cache_location=old_http_cache_location, + http_cache_size=http_cache_size, + num_http_files=num_http_files, + wheels_cache_location=wheels_cache_location, + package_count=num_packages, + wheels_cache_size=wheels_cache_size, + ) + .strip() + ) + + logger.info(message) + + def list_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if args: + pattern = args[0] + else: + pattern = "*" + + files = self._find_wheels(options, pattern) + if options.list_format == "human": + self.format_for_human(files) + else: + self.format_for_abspath(files) + + def format_for_human(self, files: List[str]) -> None: + if not files: + logger.info("No locally built wheels cached.") + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(f" - {wheel} ({size})") + logger.info("Cache contents:\n") + logger.info("\n".join(sorted(results))) + + def format_for_abspath(self, files: List[str]) -> None: + if files: + logger.info("\n".join(sorted(files))) + + def remove_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if not args: + raise CommandError("Please provide a pattern") + + files = self._find_wheels(options, args[0]) + + no_matching_msg = "No matching packages" + if args[0] == "*": + # Only fetch http files if no specific pattern given + files += self._find_http_files(options) + else: + # Add the pattern to the log message + no_matching_msg += f' for pattern "{args[0]}"' + + if not files: + logger.warning(no_matching_msg) + + for filename in files: + os.unlink(filename) + logger.verbose("Removed %s", filename) + logger.info("Files removed: %s", len(files)) + + def purge_cache(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + return self.remove_cache_items(options, ["*"]) + + def _cache_dir(self, options: Values, subdir: str) -> str: + return os.path.join(options.cache_dir, 
subdir) + + def _find_http_files(self, options: Values) -> List[str]: + old_http_dir = self._cache_dir(options, "http") + new_http_dir = self._cache_dir(options, "http-v2") + return filesystem.find_files(old_http_dir, "*") + filesystem.find_files( + new_http_dir, "*" + ) + + def _find_wheels(self, options: Values, pattern: str) -> List[str]: + wheel_dir = self._cache_dir(options, "wheels") + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. + # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/check.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/check.py new file mode 100644 index 0000000..5efd0a3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,54 @@ +import logging +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.operations.check import ( + check_package_set, + create_package_set_from_installed, + warn_legacy_versions_and_specifiers, +) +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + + usage = """ + %prog [options]""" + + def run(self, options: Values, args: List[str]) -> int: + package_set, parsing_probs = create_package_set_from_installed() + warn_legacy_versions_and_specifiers(package_set) + missing, conflicting = check_package_set(package_set) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + write_output( + "%s %s requires %s, which is not installed.", + project_name, + version, + dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + write_output( + "%s %s has requirement %s, but you have %s %s.", + project_name, + version, + req, + dep_name, + dep_version, + ) + + if missing or conflicting or parsing_probs: + return ERROR + else: + write_output("No broken requirements found.") + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/completion.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 0000000..9e89e27 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,130 @@ +import sys +import textwrap +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.misc import get_prog + 
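+# Each script below works the same way: the shell exports the current command
+# line (COMP_WORDS) and cursor position (COMP_CWORD) together with
+# PIP_AUTO_COMPLETE=1, re-invokes pip, and uses whatever pip prints as the
+# completion candidates. Typical setup in bash (illustrative):
+#
+#   $ eval "$(pip completion --bash)"
+#   $ pip ins<TAB>    # the shell calls back into pip for candidates
+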
+BASE_COMPLETION = """ +# pip {shell} completion start{script}# pip {shell} completion end +""" + +COMPLETION_SCRIPTS = { + "bash": """ + _pip_completion() + {{ + COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} + """, + "zsh": """ + #compdef -P pip[0-9.]# + __pip() {{ + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + }} + if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" + else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' + fi + """, + "fish": """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c {prog} + """, + "powershell": """ + if ((Test-Path Function:\\TabExpansion) -and -not ` + (Test-Path Function:\\_pip_completeBackup)) {{ + Rename-Item Function:\\TabExpansion _pip_completeBackup + }} + function TabExpansion($line, $lastWord) {{ + $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart() + if ($lastBlock.StartsWith("{prog} ")) {{ + $Env:COMP_WORDS=$lastBlock + $Env:COMP_CWORD=$lastBlock.Split().Length - 1 + $Env:PIP_AUTO_COMPLETE=1 + (& {prog}).Split() + Remove-Item Env:COMP_WORDS + Remove-Item Env:COMP_CWORD + Remove-Item Env:PIP_AUTO_COMPLETE + }} + elseif (Test-Path Function:\\_pip_completeBackup) {{ + # Fall back on existing tab expansion + _pip_completeBackup $line $lastWord + }} + }} + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--bash", + "-b", + action="store_const", + const="bash", + dest="shell", + help="Emit completion code for bash", + ) + self.cmd_opts.add_option( + "--zsh", + "-z", + action="store_const", + const="zsh", + dest="shell", + help="Emit completion code for zsh", + ) + self.cmd_opts.add_option( + "--fish", + "-f", + action="store_const", + const="fish", + dest="shell", + help="Emit completion code for fish", + ) + self.cmd_opts.add_option( + "--powershell", + "-p", + action="store_const", + const="powershell", + dest="shell", + help="Emit completion code for powershell", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ["--" + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog()) + ) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS + else: + sys.stderr.write( + "ERROR: You must pass {}\n".format(" or ".join(shell_options)) + ) + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 0000000..1a1dc6b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,280 @@ +import logging +import os +import subprocess +from optparse import Values +from typing import Any, List, 
Optional
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+    Configuration,
+    Kind,
+    get_configuration_files,
+    kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+    """
+    Manage local and global configuration.
+
+    Subcommands:
+
+    - list: List the active configuration (or from the file specified)
+    - edit: Edit the configuration file in an editor
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
+    - debug: List the configuration files and values defined under them
+
+    Configuration keys should be a dot-separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
+    If none of --user, --global and --site are passed, a virtual
+    environment configuration file is used if one is active and the file
+    exists. Otherwise, all modifications happen to the user file by
+    default.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog [<file-option>] list
+        %prog [<file-option>] [--editor <editor-path>] edit
+
+        %prog [<file-option>] get command.option
+        %prog [<file-option>] set command.option value
+        %prog [<file-option>] unset command.option
+        %prog [<file-option>] debug
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--editor",
+            dest="editor",
+            action="store",
+            default=None,
+            help=(
+                "Editor to use to edit the file. Uses VISUAL or EDITOR "
+                "environment variables if not provided."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--global",
+            dest="global_file",
+            action="store_true",
+            default=False,
+            help="Use the system-wide configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user_file",
+            action="store_true",
+            default=False,
+            help="Use the user configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--site",
+            dest="site_file",
+            action="store_true",
+            default=False,
+            help="Use the current environment configuration file only",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "list": self.list_values,
+            "edit": self.open_in_editor,
+            "get": self.get_name,
+            "set": self.set_name_value,
+            "unset": self.unset_name,
+            "debug": self.list_config_values,
+        }
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Determine which configuration files are to be loaded
+        # Depends on whether the command is modifying.
+        try:
+            load_only = self._determine_file(
+                options, need_value=(action in ["get", "set", "unset", "edit"])
+            )
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        # Load a new configuration
+        self.configuration = Configuration(
+            isolated=options.isolated_mode, load_only=load_only
+        )
+        self.configuration.load()
+
+        # Error handling happens here, not in the action-handlers.
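+        # e.g. "pip config set global.index-url https://example.org/" (the
+        # example from the class docstring) arrives here as action="set" and
+        # dispatches to self.set_name_value(options,
+        # ["global.index-url", "https://example.org/"]).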
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]: + file_options = [ + key + for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) + if value + ] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." + ) + + def list_values(self, options: Values, args: List[str]) -> None: + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + write_output("%s=%r", key, value) + + def get_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options: Values, args: List[str]) -> None: + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def list_config_values(self, options: Values, args: List[str]) -> None: + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant: Kind) -> None: + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self) -> None: + """Get key-values pairs present as environment variables""" + write_output("%s:", "env_var") + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = f"PIP_{key.upper()}" + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options: Values, args: List[str]) -> None: + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + elif '"' in fname: + # This shouldn't happen, unless we see a username like that. + # If that happens, we'd appreciate a pull request fixing this. 
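The double-quote guard above exists because the editor is launched through the shell as a single formatted string, so a quote inside the filename would break the command. For simple cases a list-form argv sidesteps quoting entirely; a hypothetical alternative sketch (not pip's implementation, and note the list form cannot invoke shell aliases or builtins):

# Hypothetical sketch: launching an editor without shell quoting issues.
import shlex
import subprocess

def open_in_editor(editor: str, fname: str) -> None:
    # shlex.split honors quotes inside the editor value ("code --wait" -> 2 args),
    # while the filename travels as one argv element no matter what it contains.
    argv = shlex.split(editor) + [fname]
    subprocess.check_call(argv)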
+ raise PipError( + f'Can not open an editor for a file name containing "\n{fname}' + ) + + try: + subprocess.check_call(f'{editor} "{fname}"', shell=True) + except FileNotFoundError as e: + if not e.filename: + e.filename = editor + raise + except subprocess.CalledProcessError as e: + raise PipError(f"Editor Subprocess exited with exit code {e.returncode}") + + def _get_n_args(self, args: List[str], example: str, n: int) -> Any: + """Helper to make sure the command got the right number of arguments""" + if len(args) != n: + msg = ( + f"Got unexpected number of arguments, expected {n}. " + f'(example: "{get_prog()} config {example}")' + ) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self) -> None: + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.exception( + "Unable to save configuration. Please report this as a bug." + ) + raise PipError("Internal Error.") + + def _determine_editor(self, options: Values) -> str: + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/debug.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 0000000..7e5271c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,201 @@ +import importlib.resources +import locale +import logging +import os +import sys +from optparse import Values +from types import ModuleType +from typing import Any, Dict, List, Optional + +import pip._vendor +from pip._vendor.certifi import where +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.configuration import Configuration +from pip._internal.metadata import get_environment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version + +logger = logging.getLogger(__name__) + + +def show_value(name: str, value: Any) -> None: + logger.info("%s: %s", name, value) + + +def show_sys_implementation() -> None: + logger.info("sys.implementation:") + implementation_name = sys.implementation.name + with indent_log(): + show_value("name", implementation_name) + + +def create_vendor_txt_map() -> Dict[str, str]: + with importlib.resources.open_text("pip._vendor", "vendor.txt") as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [ + line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line + ] + + # Transform into "module" -> version dict. + return dict(line.split("==", 1) for line in lines) + + +def get_module_from_module_name(module_name: str) -> Optional[ModuleType]: + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower().replace("-", "_") + # PATCH: setuptools is actually only pkg_resources. 
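create_vendor_txt_map above reduces to two steps: keep only version-pinning lines, then split each on "==". The same parsing run on an inline sample (sample lines invented for illustration):

# Sketch of the vendor.txt parsing shown above, run on invented sample text.
SAMPLE = """\
CacheControl==0.13.1  # license note
certifi==2023.7.22
# comment-only line, no pin -> dropped
"""

def vendor_map(text: str) -> dict:
    lines = [
        line.strip().split(" ", 1)[0]  # drop trailing comments and whitespace
        for line in text.splitlines()
        if "==" in line  # purge non version specifying lines
    ]
    return dict(line.split("==", 1) for line in lines)

print(vendor_map(SAMPLE))  # {'CacheControl': '0.13.1', 'certifi': '2023.7.22'}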
+ if module_name == "setuptools": + module_name = "pkg_resources" + + try: + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + except ImportError: + # We allow 'truststore' to fail to import due + # to being unavailable on Python 3.9 and earlier. + if module_name == "truststore" and sys.version_info < (3, 10): + return None + raise + + +def get_vendor_version_from_module(module_name: str) -> Optional[str]: + module = get_module_from_module_name(module_name) + version = getattr(module, "__version__", None) + + if module and not version: + # Try to find version in debundled module info. + assert module.__file__ is not None + env = get_environment([os.path.dirname(module.__file__)]) + dist = env.get_distribution(module_name) + if dist: + version = str(dist.version) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None: + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. + """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = "" + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ( + " (Unable to locate actual module version, using" + " vendor.txt specified version)" + ) + actual_version = expected_version + elif parse_version(actual_version) != parse_version(expected_version): + extra_message = ( + " (CONFLICT: vendor.txt suggests version should" + f" be {expected_version})" + ) + logger.info("%s==%s%s", module_name, actual_version, extra_message) + + +def show_vendor_versions() -> None: + logger.info("vendored library versions:") + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options: Values) -> None: + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_sorted_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = "" + if formatted_target: + suffix = f" (target: {formatted_target})" + + msg = f"Compatible tags: {len(tags)}{suffix}" + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" + logger.info(msg) + + +def ca_bundle_info(config: Configuration) -> str: + levels = {key.split(".", 1)[0] for key, _ in config.items()} + if not levels: + return "Not specified" + + levels_that_override_global = ["install", "wheel", "download"] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return "global" + + if "global" in levels: + levels.remove("global") + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options: Values, args: List[str]) -> int: + logger.warning( + "This command is only meant for debugging. 
" + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." + ) + show_value("pip version", get_pip_version()) + show_value("sys.version", sys.version) + show_value("sys.executable", sys.executable) + show_value("sys.getdefaultencoding", sys.getdefaultencoding()) + show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) + show_value( + "locale.getpreferredencoding", + locale.getpreferredencoding(), + ) + show_value("sys.platform", sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE")) + show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE")) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/download.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/download.py new file mode 100644 index 0000000..54247a7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,147 @@ +import logging +import os +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.req.req_install import check_legacy_setup_py_options +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... 
+ %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + + self.cmd_opts.add_option( + "-d", + "--dest", + "--destination-dir", + "--destination-directory", + dest="download_dir", + metavar="dir", + default=os.curdir, + help="Download packages into .", + ) + + cmdoptions.add_target_python_options(self.cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + + options.download_dir = normalize_path(options.download_dir) + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + check_legacy_setup_py_options(options, reqs) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + verbosity=self.verbosity, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + downloaded: List[str] = [] + for req in requirement_set.requirements.values(): + if req.satisfied_by is None: + assert req.name is not None + preparer.save_linked_requirement(req) + downloaded.append(req.name) + + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + requirement_set.warn_legacy_versions_and_specifiers() + + if downloaded: + write_output("Successfully downloaded %s", " ".join(downloaded)) + + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py new file mode 100644 
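The freeze.py module added below emits one requirement per line in the standard name==version pin format. Roughly the same output can be produced with only the standard library; a sketch (no editable or VCS handling, unlike pip's freeze):

# Sketch: freeze-style "name==version" lines via importlib.metadata,
# approximating `pip freeze` output without pip internals.
from importlib import metadata

def simple_freeze(skip=frozenset({"pip", "setuptools", "wheel"})):
    dists = sorted(
        metadata.distributions(),
        key=lambda d: (d.metadata["Name"] or "").lower(),  # case-insensitive order
    )
    for dist in dists:
        name = dist.metadata["Name"]
        if name and name.lower() not in skip:
            yield f"{name}=={dist.version}"

for line in simple_freeze():
    print(line)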
index 0000000..e64cb3d
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,108 @@
+import sys
+from optparse import Values
+from typing import AbstractSet, List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+
+def _should_suppress_build_backends() -> bool:
+    return sys.version_info < (3, 12)
+
+
+def _dev_pkgs() -> AbstractSet[str]:
+    pkgs = {"pip"}
+
+    if _should_suppress_build_backends():
+        pkgs |= {"setuptools", "distribute", "wheel"}
+
+    return pkgs
+
+
+class FreezeCommand(Command):
+    """
+    Output installed packages in requirements format.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    usage = """
+      %prog [options]"""
+    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(_dev_pkgs()))
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        skip = set(stdlib_pkgs)
+        if not options.freeze_all:
+            skip.update(_dev_pkgs())
+
+        if options.excludes:
+            skip.update(options.excludes)
+
+        cmdoptions.check_list_path_option(options)
+
+        for line in freeze(
+            requirement=options.requirements,
+            local_only=options.local,
+            user_only=options.user,
+            paths=options.path,
+            isolated=options.isolated_mode,
+            skip=skip,
+            exclude_editable=options.exclude_editable,
+        ):
+            sys.stdout.write(line + "\n")
+        return SUCCESS
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/hash.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 0000000..042dac8
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+    """
+    Compute a hash of a local package archive.
+ + These can be used with --hash in a requirements file to do repeatable + installs. + """ + + usage = "%prog [options] ..." + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-a", + "--algorithm", + dest="algorithm", + choices=STRONG_HASHES, + action="store", + default=FAVORITE_HASH, + help="The hash algorithm to use: one of {}".format( + ", ".join(STRONG_HASHES) + ), + ) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output( + "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) + ) + return SUCCESS + + +def _hash_of_file(path: str, algorithm: str) -> str: + """Return the hash digest of a file.""" + with open(path, "rb") as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/help.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/help.py new file mode 100644 index 0000000..6206631 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,41 @@ +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog """ + ignore_require_venv = True + + def run(self, options: Values, args: List[str]) -> int: + from pip._internal.commands import ( + commands_dict, + create_command, + get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/index.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/index.py new file mode 100644 index 0000000..f55e9e4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/index.py @@ -0,0 +1,139 @@ +import logging +from optparse import Values +from typing import Any, Iterable, List, Optional, Union + +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.commands.search import print_dist_installation_info +from pip._internal.exceptions import CommandError, DistributionNotFound, PipError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class IndexCommand(IndexGroupCommand): + """ + Inspect information available from package 
indexes. + """ + + ignore_require_venv = True + usage = """ + %prog versions + """ + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "versions": self.get_available_package_versions, + } + + logger.warning( + "pip index is currently an experimental command. " + "It may be removed/changed in a future release " + "without prior warning." + ) + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to the index command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) + + def get_available_package_versions(self, options: Values, args: List[Any]) -> None: + if len(args) != 1: + raise CommandError("You need to specify exactly one argument") + + target_python = cmdoptions.make_target_python(options) + query = args[0] + + with self._build_session(options) as session: + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + versions: Iterable[Union[LegacyVersion, Version]] = ( + candidate.version for candidate in finder.find_all_candidates(query) + ) + + if not options.pre: + # Remove prereleases + versions = ( + version for version in versions if not version.is_prerelease + ) + versions = set(versions) + + if not versions: + raise DistributionNotFound( + f"No matching distribution found for {query}" + ) + + formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] + latest = formatted_versions[0] + + write_output(f"{query} ({latest})") + write_output("Available versions: {}".format(", ".join(formatted_versions))) + print_dist_installation_info(query, latest) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py new file mode 100644 index 0000000..27c8fa3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py @@ -0,0 +1,92 @@ +import logging +from optparse import Values +from typing import Any, Dict, List + +from pip._vendor.packaging.markers import default_environment +from pip._vendor.rich import 
print_json + +from pip import __version__ +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.urls import path_to_url + +logger = logging.getLogger(__name__) + + +class InspectCommand(Command): + """ + Inspect the content of a Python environment and produce a report in JSON format. + """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not list " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + cmdoptions.check_list_path_option(options) + dists = get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + skip=set(stdlib_pkgs), + ) + output = { + "version": "1", + "pip_version": __version__, + "installed": [self._dist_to_dict(dist) for dist in dists], + "environment": default_environment(), + # TODO tags? scheme? + } + print_json(data=output) + return SUCCESS + + def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]: + res: Dict[str, Any] = { + "metadata": dist.metadata_dict, + "metadata_location": dist.info_location, + } + # direct_url. Note that we don't have download_info (as in the installation + # report) since it is not recorded in installed metadata. + direct_url = dist.direct_url + if direct_url is not None: + res["direct_url"] = direct_url.to_dict() + else: + # Emulate direct_url for legacy editable installs. 
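The fallback below synthesizes the same shape of data that PEP 610 records in direct_url.json for an editable install. A sketch of constructing that structure for a local checkout, using pathlib in place of pip's path_to_url (the path is invented):

# Sketch: the PEP 610-style dict emulated below for legacy editable installs.
from pathlib import Path

def emulated_direct_url(project_dir: str) -> dict:
    return {
        "url": Path(project_dir).resolve().as_uri(),  # file:///... URL
        "dir_info": {"editable": True},
    }

print(emulated_direct_url("."))
# e.g. {'url': 'file:///home/user/project', 'dir_info': {'editable': True}}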
+ editable_project_location = dist.editable_project_location + if editable_project_location is not None: + res["direct_url"] = { + "url": path_to_url(editable_project_location), + "dir_info": { + "editable": True, + }, + } + # installer + installer = dist.installer + if dist.installer: + res["installer"] = installer + # requested + if dist.installed_with_dist_info: + res["requested"] = dist.requested + return res diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/install.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/install.py new file mode 100644 index 0000000..e944bb9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,774 @@ +import errno +import json +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP, Values +from typing import List, Optional + +from pip._vendor.rich import print_json + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import ( + RequirementCommand, + warn_if_run_as_root, + with_cleanup, +) +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import get_scheme +from pip._internal.metadata import get_environment +from pip._internal.models.installation_report import InstallationReport +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.operations.check import ConflictDetails, check_install_conflicts +from pip._internal.req import install_given_reqs +from pip._internal.req.req_install import ( + InstallRequirement, + check_legacy_setup_py_options, +) +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + check_externally_managed, + ensure_dir, + get_pip_version, + protect_pip_from_modification_on_windows, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) +from pip._internal.wheel_builder import build, should_build_for_install_command + +logger = getLogger(__name__) + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( + "--dry-run", + action="store_true", + dest="dry_run", + default=False, + help=( + "Don't actually install anything, just print what would be. " + "Can be used in combination with --ignore-installed " + "to 'resolve' the requirements." 
+ ), + ) + self.cmd_opts.add_option( + "-t", + "--target", + dest="target_dir", + metavar="dir", + default=None, + help=( + "Install packages into . " + "By default this will not replace existing files/folders in " + ". Use --upgrade to replace existing packages in " + "with new versions." + ), + ) + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option( + "--user", + dest="use_user_site", + action="store_true", + help=( + "Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. (See the Python documentation for site.USER_BASE " + "for full details.)" + ), + ) + self.cmd_opts.add_option( + "--no-user", + dest="use_user_site", + action="store_false", + help=SUPPRESS_HELP, + ) + self.cmd_opts.add_option( + "--root", + dest="root_path", + metavar="dir", + default=None, + help="Install everything relative to this alternate root directory.", + ) + self.cmd_opts.add_option( + "--prefix", + dest="prefix_path", + metavar="dir", + default=None, + help=( + "Installation prefix where lib, bin and other top-level " + "folders are placed. Note that the resulting installation may " + "contain scripts and other resources which reference the " + "Python interpreter of pip, and not that of ``--prefix``. " + "See also the ``--python`` option if the intention is to " + "install packages into another (possibly pip-free) " + "environment." + ), + ) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option( + "-U", + "--upgrade", + dest="upgrade", + action="store_true", + help=( + "Upgrade all specified packages to the newest available " + "version. The handling of dependencies depends on the " + "upgrade-strategy used." + ), + ) + + self.cmd_opts.add_option( + "--upgrade-strategy", + dest="upgrade_strategy", + default="only-if-needed", + choices=["only-if-needed", "eager"], + help=( + "Determines how dependency upgrading should be handled " + "[default: %default]. " + '"eager" - dependencies are upgraded regardless of ' + "whether the currently installed version satisfies the " + "requirements of the upgraded package(s). " + '"only-if-needed" - are upgraded only when they do not ' + "satisfy the requirements of the upgraded package(s)." + ), + ) + + self.cmd_opts.add_option( + "--force-reinstall", + dest="force_reinstall", + action="store_true", + help="Reinstall all packages even if they are already up-to-date.", + ) + + self.cmd_opts.add_option( + "-I", + "--ignore-installed", + dest="ignore_installed", + action="store_true", + help=( + "Ignore the installed packages, overwriting them. " + "This can break your system if the existing package " + "is of a different version or was installed " + "with a different package manager!" 
+ ), + ) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) + + self.cmd_opts.add_option(cmdoptions.config_settings()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + self.cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.root_user_action()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + self.cmd_opts.add_option( + "--report", + dest="json_report_file", + metavar="file", + default=None, + help=( + "Generate a JSON file describing what pip did to install " + "the provided requirements. " + "Can be used in combination with --dry-run and --ignore-installed " + "to 'resolve' the requirements. " + "When - is used as file name it writes to stdout. " + "When writing to stdout, please combine with the --quiet option " + "to avoid mixing pip logging output with JSON output." + ), + ) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + + # Check whether the environment we're installing into is externally + # managed, as specified in PEP 668. Specifying --root, --target, or + # --prefix disables the check, since there's no reliable way to locate + # the EXTERNALLY-MANAGED file for those cases. An exception is also + # made specifically for "--dry-run --report" for convenience. 
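check_externally_managed (imported near the top of this file) implements the PEP 668 probe referenced in the comment above. The core of that check is a single marker-file test; a sketch, assuming the marker location that PEP 668 specifies (sysconfig's stdlib directory):

# Sketch of the PEP 668 marker check behind check_externally_managed():
# a distro marks an environment by dropping an EXTERNALLY-MANAGED file
# into sysconfig's stdlib path.
import os
import sysconfig

def is_externally_managed() -> bool:
    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
    return os.path.isfile(marker)

print(is_externally_managed())  # True on e.g. distro-managed system Pythons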
+ installing_into_current_environment = ( + not (options.dry_run and options.json_report_file) + and options.root_path is None + and options.target_dir is None + and options.prefix_path is None + ) + if ( + installing_into_current_environment + and not options.override_externally_managed + ): + check_externally_managed() + + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_dist_restriction(options, check_target=True) + + logger.verbose("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir: Optional[TempDirectory] = None + target_temp_dir_path: Optional[str] = None + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if ( + # fmt: off + os.path.exists(options.target_dir) and + not os.path.isdir(options.target_dir) + # fmt: on + ): + raise CommandError( + "Target path exists but is not a directory, will not continue." + ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) + + global_options = options.global_options or [] + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + check_legacy_setup_py_options(options, reqs) + + wheel_cache = WheelCache(options.cache_dir) + + # Only when installing is it permitted to use PEP 660. + # In other circumstances (pip wheel, pip download) we generate + # regular (i.e. non editable) metadata and wheels. + for req in reqs: + req.permit_editable_wheels = True + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + verbosity=self.verbosity, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) + + if options.json_report_file: + report = InstallationReport(requirement_set.requirements_to_install) + if options.json_report_file == "-": + print_json(data=report.to_dict()) + else: + with open(options.json_report_file, "w", encoding="utf-8") as f: + json.dump(report.to_dict(), f, indent=2, ensure_ascii=False) + + if options.dry_run: + # In non dry-run mode, the legacy versions and specifiers check + # will be done as part of conflict detection. 
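On the consumer side of the --report file written above: the document is plain JSON whose top-level "install" array describes each requirement that would be (or was) installed. A reading sketch; field names follow pip's installation-report format, version "1", and should be checked against the pip version in use:

# Sketch: extracting (name, version) pins from a `pip install --report` file.
import json

def report_pins(path: str) -> list:
    with open(path, encoding="utf-8") as f:
        report = json.load(f)
    return [
        (item["metadata"]["name"], item["metadata"]["version"])
        for item in report.get("install", [])
    ]

# Usage (file name invented): pins = report_pins("report.json")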
+ requirement_set.warn_legacy_versions_and_specifiers() + would_install_items = sorted( + (r.metadata["name"], r.metadata["version"]) + for r in requirement_set.requirements_to_install + ) + if would_install_items: + write_output( + "Would install %s", + " ".join("-".join(item) for item in would_install_items), + ) + return SUCCESS + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) + + reqs_to_build = [ + r + for r in requirement_set.requirements.values() + if should_build_for_install_command(r) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=True, + build_options=[], + global_options=global_options, + ) + + if build_failures: + raise InstallationError( + "Could not build wheels for {}, which is required to " + "install pyproject.toml-based projects".format( + ", ".join(r.name for r in build_failures) # type: ignore + ) + ) + + to_install = resolver.get_installation_order(requirement_set) + + # Check for conflicts in the package set we're installing. + conflicts: Optional[ConflictDetails] = None + should_warn_about_conflicts = ( + not options.ignore_dependencies and options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target or --prefix has been specified + warn_script_location = options.warn_script_location + if options.target_dir or options.prefix_path: + warn_script_location = False + + installed = install_given_reqs( + to_install, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + env = get_environment(lib_locations) + + installed.sort(key=operator.attrgetter("name")) + items = [] + for result in installed: + item = result.name + try: + installed_dist = env.get_distribution(item) + if installed_dist is not None: + item = f"{item}-{installed_dist.version}" + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + resolver_variant=self.determine_resolver_variant(options), + ) + + installed_desc = " ".join(items) + if installed_desc: + write_output( + "Successfully installed %s", + installed_desc, + ) + except OSError as error: + show_traceback = self.verbosity >= 1 + + message = create_os_error_message( + error, + show_traceback, + options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) + + return ERROR + + if options.target_dir: + assert target_temp_dir + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + if options.root_user_action == "warn": + warn_if_run_as_root() + return SUCCESS + + def _handle_target_dir( + self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool + ) -> None: + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + # Checking both purelib and platlib 
directories for installed + # packages to be moved to target directory + scheme = get_scheme("", home=target_temp_dir.path) + purelib_dir = scheme.purelib + platlib_dir = scheme.platlib + data_dir = scheme.data + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + "Target directory %s already exists. Specify " + "--upgrade to force replacement.", + target_item_dir, + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + "Target directory %s already exists and is " + "a link. pip will not automatically replace " + "links, please remove if replacement is " + "desired.", + target_item_dir, + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move(os.path.join(lib_dir, item), target_item_dir) + + def _determine_conflicts( + self, to_install: List[InstallRequirement] + ) -> Optional[ConflictDetails]: + try: + return check_install_conflicts(to_install) + except Exception: + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts( + self, conflict_details: ConflictDetails, resolver_variant: str + ) -> None: + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: + return + + parts: List[str] = [] + if resolver_variant == "legacy": + parts.append( + "pip's legacy dependency resolver does not consider dependency " + "conflicts when selecting packages. This behaviour is the " + "source of the following dependency conflicts." + ) + else: + assert resolver_variant == "resolvelib" + parts.append( + "pip's dependency resolver does not currently take into account " + "all the packages that are installed. This behaviour is the " + "source of the following dependency conflicts." + ) + + # NOTE: There is some duplication here, with commands/check.py + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + message = ( + f"{project_name} {version} requires {dependency[1]}, " + "which is not installed." + ) + parts.append(message) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + message = ( + "{name} {version} requires {requirement}, but {you} have " + "{dep_name} {dep_version} which is incompatible." 
+ ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, + you=("you" if resolver_variant == "resolvelib" else "you'll"), + ) + parts.append(message) + + logger.critical("\n".join(parts)) + + +def get_lib_location_guesses( + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> List[str]: + scheme = get_scheme( + "", + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + return [scheme.purelib, scheme.platlib] + + +def site_packages_writable(root: Optional[str], isolated: bool) -> bool: + return all( + test_writable_dir(d) + for d in set(get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site: Optional[bool], + prefix_path: Optional[str] = None, + target_dir: Optional[str] = None, + root_path: Optional[str] = None, + isolated_mode: bool = False, +) -> bool: + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info( + "Defaulting to user installation because normal site-packages " + "is not writeable" + ) + return True + + +def create_os_error_message( + error: OSError, show_traceback: bool, using_user_site: bool +) -> str: + """Format an error message for an OSError + + It may occur anytime during the execution of the install command. 
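decide_user_install above reads most clearly as a precedence list. A compressed, self-contained rendering of that precedence with the environment probes reduced to boolean parameters (simplified stand-in, not pip's function):

# Sketch: the precedence implemented by decide_user_install, with pip's
# site/permission checks stubbed out as plain booleans.
from typing import Optional

def decide(
    use_user_site: Optional[bool],
    prefix_or_target: bool,
    user_site_enabled: bool,
    site_packages_writable: bool,
) -> bool:
    if use_user_site is not None:
        return bool(use_user_site)  # explicit request wins (pip also validates it)
    if prefix_or_target:
        return False  # --prefix/--target imply a non-user install
    if not user_site_enabled:
        return False  # user site-packages disabled for this interpreter
    return not site_packages_writable  # fall back to user only when needed

assert decide(None, False, True, False) is True   # unwritable site-packages
assert decide(None, False, True, True) is False   # writable: normal install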
+ """ + parts = [] + + # Mention the error if we are not going to show a traceback + parts.append("Could not install packages due to an OSError") + if not show_traceback: + parts.append(": ") + parts.append(str(error)) + else: + parts.append(".") + + # Spilt the error indication from a helper message (if any) + parts[-1] += "\n" + + # Suggest useful actions to the user: + # (1) using user site-packages or (2) verifying the permissions + if error.errno == errno.EACCES: + user_option_part = "Consider using the `--user` option" + permissions_part = "Check the permissions" + + if not running_under_virtualenv() and not using_user_site: + parts.extend( + [ + user_option_part, + " or ", + permissions_part.lower(), + ] + ) + else: + parts.append(permissions_part) + parts.append(".\n") + + # Suggest the user to enable Long Paths if path length is + # more than 260 + if ( + WINDOWS + and error.errno == errno.ENOENT + and error.filename + and len(error.filename) > 260 + ): + parts.append( + "HINT: This error might have occurred since " + "this system does not have Windows Long Path " + "support enabled. You can find information on " + "how to enable this at " + "https://pip.pypa.io/warnings/enable-long-paths\n" + ) + + return "".join(parts).strip() + "\n" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/list.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/list.py new file mode 100644 index 0000000..32fb19b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,370 @@ +import json +import logging +from optparse import Values +from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.misc import tabulate, write_output + +if TYPE_CHECKING: + from pip._internal.metadata.base import DistributionVersion + + class _DistWithLatestInfo(BaseDistribution): + """Give the distribution object a couple of extra fields. + + These will be populated during ``get_outdated()``. This is dirty but + makes the rest of the code much cleaner. + """ + + latest_version: DistributionVersion + latest_filetype: str + + _ProcessedDists = Sequence[_DistWithLatestInfo] + + +from pip._vendor.packaging.version import parse + +logger = logging.getLogger(__name__) + + +class ListCommand(IndexGroupCommand): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. 
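The case-insensitive ordering promised in the docstring above comes from sorting on canonicalized project names. A sketch using the PEP 503 normalization that packaging's canonicalize_name implements (sample names arbitrary):

# Sketch: PEP 503 name normalization, the sort key behind pip list's ordering.
import re

def canonical(name: str) -> str:
    return re.sub(r"[-_.]+", "-", name).lower()

names = ["Django", "pip", "CacheControl", "zope.interface"]
print(sorted(names, key=canonical))
# ['CacheControl', 'Django', 'pip', 'zope.interface']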
+ """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-o", + "--outdated", + action="store_true", + default=False, + help="List outdated packages", + ) + self.cmd_opts.add_option( + "-u", + "--uptodate", + action="store_true", + default=False, + help="List uptodate packages", + ) + self.cmd_opts.add_option( + "-e", + "--editable", + action="store_true", + default=False, + help="List editable projects.", + ) + self.cmd_opts.add_option( + "-l", + "--local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not list " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option( + "--format", + action="store", + dest="list_format", + default="columns", + choices=("columns", "freeze", "json"), + help=( + "Select the output format among: columns (default), freeze, or json. " + "The 'freeze' format cannot be used with the --outdated option." + ), + ) + + self.cmd_opts.add_option( + "--not-required", + action="store_true", + dest="not_required", + help="List packages that are not dependencies of installed packages.", + ) + + self.cmd_opts.add_option( + "--exclude-editable", + action="store_false", + dest="include_editable", + help="Exclude editable package from output.", + ) + self.cmd_opts.add_option( + "--include-editable", + action="store_true", + dest="include_editable", + help="Include editable package from output.", + default=True, + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def _build_package_finder( + self, options: Values, session: PipSession + ) -> PackageFinder: + """ + Create a package finder appropriate to this list command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + + def run(self, options: Values, args: List[str]) -> int: + if options.outdated and options.uptodate: + raise CommandError("Options --outdated and --uptodate cannot be combined.") + + if options.outdated and options.list_format == "freeze": + raise CommandError( + "List format 'freeze' cannot be used with the --outdated option." + ) + + cmdoptions.check_list_path_option(options) + + skip = set(stdlib_pkgs) + if options.excludes: + skip.update(canonicalize_name(n) for n in options.excludes) + + packages: "_ProcessedDists" = [ + cast("_DistWithLatestInfo", d) + for d in get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + skip=skip, + ) + ] + + # get_not_required must be called firstly in order to find and + # filter out all dependencies correctly. 
Otherwise a package + # can't be identified as requirement because some parent packages + # could be filtered out before. + if options.not_required: + packages = self.get_not_required(packages, options) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + self.output_package_listing(packages, options) + return SUCCESS + + def get_outdated( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if parse(str(dist.latest_version)) > parse(str(dist.version)) + ] + + def get_uptodate( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if parse(str(dist.latest_version)) == parse(str(dist.version)) + ] + + def get_not_required( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + dep_keys = { + canonicalize_name(dep.name) + for dist in packages + for dep in (dist.iter_dependencies() or ()) + } + + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) + + def iter_packages_latest_infos( + self, packages: "_ProcessedDists", options: Values + ) -> Generator["_DistWithLatestInfo", None, None]: + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + + def latest_info( + dist: "_DistWithLatestInfo", + ) -> Optional["_DistWithLatestInfo"]: + all_candidates = finder.find_all_candidates(dist.canonical_name) + if not options.pre: + # Remove prereleases + all_candidates = [ + candidate + for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + evaluator = finder.make_candidate_evaluator( + project_name=dist.canonical_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + + remote_version = best_candidate.version + if best_candidate.link.is_wheel: + typ = "wheel" + else: + typ = "sdist" + dist.latest_version = remote_version + dist.latest_filetype = typ + return dist + + for dist in map(latest_info, packages): + if dist is not None: + yield dist + + def output_package_listing( + self, packages: "_ProcessedDists", options: Values + ) -> None: + packages = sorted( + packages, + key=lambda dist: dist.canonical_name, + ) + if options.list_format == "columns" and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == "freeze": + for dist in packages: + if options.verbose >= 1: + write_output( + "%s==%s (%s)", dist.raw_name, dist.version, dist.location + ) + else: + write_output("%s==%s", dist.raw_name, dist.version) + elif options.list_format == "json": + write_output(format_for_json(packages, options)) + + def output_package_listing_columns( + self, data: List[List[str]], header: List[str] + ) -> None: + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. 
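+        # For the default header ["Package", "Version"] this yields a row
+        # like "------- -------", one dash run per column width (illustrative).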
+        if len(data) > 0:
+            pkg_strings.insert(1, " ".join("-" * x for x in sizes))
+
+        for val in pkg_strings:
+            write_output(val)
+
+
+def format_for_columns(
+    pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
+    """
+    Convert the package data into something usable
+    by output_package_listing_columns.
+    """
+    header = ["Package", "Version"]
+
+    running_outdated = options.outdated
+    if running_outdated:
+        header.extend(["Latest", "Type"])
+
+    has_editables = any(x.editable for x in pkgs)
+    if has_editables:
+        header.append("Editable project location")
+
+    if options.verbose >= 1:
+        header.append("Location")
+    if options.verbose >= 1:
+        header.append("Installer")
+
+    data = []
+    for proj in pkgs:
+        # if we're working on the 'outdated' list, separate out the
+        # latest_version and type
+        row = [proj.raw_name, str(proj.version)]
+
+        if running_outdated:
+            row.append(str(proj.latest_version))
+            row.append(proj.latest_filetype)
+
+        if has_editables:
+            row.append(proj.editable_project_location or "")
+
+        if options.verbose >= 1:
+            row.append(proj.location or "")
+        if options.verbose >= 1:
+            row.append(proj.installer)
+
+        data.append(row)
+
+    return data, header
+
+
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
+    data = []
+    for dist in packages:
+        info = {
+            "name": dist.raw_name,
+            "version": str(dist.version),
+        }
+        if options.verbose >= 1:
+            info["location"] = dist.location or ""
+            info["installer"] = dist.installer
+        if options.outdated:
+            info["latest_version"] = str(dist.latest_version)
+            info["latest_filetype"] = dist.latest_filetype
+        editable_project_location = dist.editable_project_location
+        if editable_project_location:
+            info["editable_project_location"] = editable_project_location
+        data.append(info)
+    return json.dumps(data)
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/search.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 0000000..03ed925
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,174 @@
+import logging
+import shutil
+import sys
+import textwrap
+import xmlrpc.client
+from collections import OrderedDict
+from optparse import Values
+from typing import TYPE_CHECKING, Dict, List, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    class TransformedHit(TypedDict):
+        name: str
+        summary: str
+        versions: List[str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+    """Search for PyPI packages whose name or summary contains <query>."""
+
+    usage = """
+      %prog [options] <query>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-i",
+            "--index",
+            dest="index",
+            metavar="URL",
+            default=PyPI.pypi_url,
+            help="Base URL of Python Package Index (default %default)",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self,
options: Values, args: List[str]) -> int: + if not args: + raise CommandError("Missing required argument (search query).") + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = shutil.get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query: List[str], options: Values) -> List[Dict[str, str]]: + index_url = options.index + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc.client.ServerProxy(index_url, transport) + try: + hits = pypi.search({"name": query, "summary": query}, "or") + except xmlrpc.client.Fault as fault: + message = "XMLRPC request failed [code: {code}]\n{string}".format( + code=fault.faultCode, + string=fault.faultString, + ) + raise CommandError(message) + assert isinstance(hits, list) + return hits + + +def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]: + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages: Dict[str, "TransformedHit"] = OrderedDict() + for hit in hits: + name = hit["name"] + summary = hit["summary"] + version = hit["version"] + + if name not in packages.keys(): + packages[name] = { + "name": name, + "summary": summary, + "versions": [version], + } + else: + packages[name]["versions"].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]["versions"]): + packages[name]["summary"] = summary + + return list(packages.values()) + + +def print_dist_installation_info(name: str, latest: str) -> None: + env = get_default_environment() + dist = env.get_distribution(name) + if dist is not None: + with indent_log(): + if dist.version == latest: + write_output("INSTALLED: %s (latest)", dist.version) + else: + write_output("INSTALLED: %s", dist.version) + if parse_version(latest).pre: + write_output( + "LATEST: %s (pre-release; install" + " with `pip install --pre`)", + latest, + ) + else: + write_output("LATEST: %s", latest) + + +def print_results( + hits: List["TransformedHit"], + name_column_width: Optional[int] = None, + terminal_width: Optional[int] = None, +) -> None: + if not hits: + return + if name_column_width is None: + name_column_width = ( + max( + [ + len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) + for hit in hits + ] + ) + + 4 + ) + + for hit in hits: + name = hit["name"] + summary = hit["summary"] or "" + latest = highest_version(hit.get("versions", ["-"])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary_lines = textwrap.wrap(summary, target_width) + summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines) + + name_latest = f"{name} ({latest})" + line = f"{name_latest:{name_column_width}} - {summary}" + try: + write_output(line) + print_dist_installation_info(name, latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions: List[str]) -> str: + return max(versions, key=parse_version) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/show.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/show.py new file mode 100644 index 0000000..3f10701 
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,189 @@
+import logging
+from optparse import Values
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+    """
+    Show information about one or more installed packages.
+
+    The output is in RFC-compliant mail header format.
+    """
+
+    usage = """
+      %prog [options] <package> ..."""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
+            default=False,
+            help="Show the full list of installed files for each package.",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            logger.warning("ERROR: Please provide a package name or names.")
+            return ERROR
+        query = args
+
+        results = search_packages_info(query)
+        if not print_results(
+            results, list_files=options.files, verbose=options.verbose
+        ):
+            return ERROR
+        return SUCCESS
+
+
+class _PackageInfo(NamedTuple):
+    name: str
+    version: str
+    location: str
+    editable_project_location: Optional[str]
+    requires: List[str]
+    required_by: List[str]
+    installer: str
+    metadata_version: str
+    classifiers: List[str]
+    summary: str
+    homepage: str
+    project_urls: List[str]
+    author: str
+    author_email: str
+    license: str
+    entry_points: List[str]
+    files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
+    """
+    Gather details from installed distributions. Print distribution name,
+    version, location, and installed files. Installed files requires a
+    pip generated 'installed-files.txt' in the distribution's '.egg-info'
+    directory.
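+
+    A sketch of how this generator may be consumed (the query name is
+    hypothetical):
+
+        for info in search_packages_info(["somepackage"]):
+            print(info.name, info.version, info.location)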
+ """ + env = get_default_environment() + + installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()} + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning("Package(s) not found: %s", ", ".join(missing)) + + def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: + return ( + dist.metadata["Name"] or "UNKNOWN" + for dist in installed.values() + if current_dist.canonical_name + in {canonicalize_name(d.name) for d in dist.iter_dependencies()} + ) + + for query_name in query_names: + try: + dist = installed[query_name] + except KeyError: + continue + + requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower) + required_by = sorted(_get_requiring_packages(dist), key=str.lower) + + try: + entry_points_text = dist.read_text("entry_points.txt") + entry_points = entry_points_text.splitlines(keepends=False) + except FileNotFoundError: + entry_points = [] + + files_iter = dist.iter_declared_entries() + if files_iter is None: + files: Optional[List[str]] = None + else: + files = sorted(files_iter) + + metadata = dist.metadata + + yield _PackageInfo( + name=dist.raw_name, + version=str(dist.version), + location=dist.location or "", + editable_project_location=dist.editable_project_location, + requires=requires, + required_by=required_by, + installer=dist.installer, + metadata_version=dist.metadata_version or "", + classifiers=metadata.get_all("Classifier", []), + summary=metadata.get("Summary", ""), + homepage=metadata.get("Home-page", ""), + project_urls=metadata.get_all("Project-URL", []), + author=metadata.get("Author", ""), + author_email=metadata.get("Author-email", ""), + license=metadata.get("License", ""), + entry_points=entry_points, + files=files, + ) + + +def print_results( + distributions: Iterable[_PackageInfo], + list_files: bool, + verbose: bool, +) -> bool: + """ + Print the information from installed distributions found. 
+ """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + write_output("---") + + write_output("Name: %s", dist.name) + write_output("Version: %s", dist.version) + write_output("Summary: %s", dist.summary) + write_output("Home-page: %s", dist.homepage) + write_output("Author: %s", dist.author) + write_output("Author-email: %s", dist.author_email) + write_output("License: %s", dist.license) + write_output("Location: %s", dist.location) + if dist.editable_project_location is not None: + write_output( + "Editable project location: %s", dist.editable_project_location + ) + write_output("Requires: %s", ", ".join(dist.requires)) + write_output("Required-by: %s", ", ".join(dist.required_by)) + + if verbose: + write_output("Metadata-Version: %s", dist.metadata_version) + write_output("Installer: %s", dist.installer) + write_output("Classifiers:") + for classifier in dist.classifiers: + write_output(" %s", classifier) + write_output("Entry-points:") + for entry in dist.entry_points: + write_output(" %s", entry.strip()) + write_output("Project-URLs:") + for project_url in dist.project_urls: + write_output(" %s", project_url) + if list_files: + write_output("Files:") + if dist.files is None: + write_output("Cannot locate RECORD or installed-files.txt") + else: + for line in dist.files: + write_output(" %s", line.strip()) + return results_printed diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 0000000..f198fc3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,113 @@ +import logging +from optparse import Values +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import InstallationError +from pip._internal.req import parse_requirements +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) +from pip._internal.utils.misc import ( + check_externally_managed, + protect_pip_from_modification_on_windows, +) + +logger = logging.getLogger(__name__) + + +class UninstallCommand(Command, SessionCommandMixin): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + + usage = """ + %prog [options] ... + %prog [options] -r ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Uninstall all the packages listed in the given requirements " + "file. This option can be used multiple times." 
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        reqs_to_uninstall = {}
+        for name in args:
+            req = install_req_from_line(
+                name,
+                isolated=options.isolated_mode,
+            )
+            if req.name:
+                reqs_to_uninstall[canonicalize_name(req.name)] = req
+            else:
+                logger.warning(
+                    "Invalid requirement: %r ignored -"
+                    " the uninstall command expects named"
+                    " requirements.",
+                    name,
+                )
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, options=options, session=session
+            ):
+                req = install_req_from_parsed_requirement(
+                    parsed_req, isolated=options.isolated_mode
+                )
+                if req.name:
+                    reqs_to_uninstall[canonicalize_name(req.name)] = req
+        if not reqs_to_uninstall:
+            raise InstallationError(
+                f"You must give at least one requirement to {self.name} (see "
+                f'"pip help {self.name}")'
+            )
+
+        if not options.override_externally_managed:
+            check_externally_managed()
+
+        protect_pip_from_modification_on_windows(
+            modifying_pip="pip" in reqs_to_uninstall
+        )
+
+        for req in reqs_to_uninstall.values():
+            uninstall_pathset = req.uninstall(
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
+            )
+            if uninstall_pathset:
+                uninstall_pathset.commit()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
+        return SUCCESS
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000..ed578aa
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,183 @@
+import logging
+import os
+import shutil
+from optparse import Values
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: https://wheel.readthedocs.io/en/latest/
+
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/
+
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
+            default=os.curdir,
+            help=(
+                "Build wheels into <dir>, where the default is the "
+                "current working directory."
+            ),
+        )
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.editable())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+
+        self.cmd_opts.add_option(
+            "--no-verify",
+            dest="no_verify",
+            action="store_true",
+            default=False,
+            help="Don't verify if built wheel is valid.",
+        )
+
+        self.cmd_opts.add_option(cmdoptions.config_settings())
+        self.cmd_opts.add_option(cmdoptions.build_options())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        finder = self._build_package_finder(options, session)
+
+        options.wheel_dir = normalize_path(options.wheel_dir)
+        ensure_dir(options.wheel_dir)
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="wheel",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(options, reqs)
+
+        wheel_cache = WheelCache(options.cache_dir)
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.wheel_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            wheel_cache=wheel_cache,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        reqs_to_build: List[InstallRequirement] = []
+        for req in requirement_set.requirements.values():
+            if req.is_wheel:
+                preparer.save_linked_requirement(req)
+            elif should_build_for_wheel_command(req):
+                reqs_to_build.append(req)
+
+        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+        requirement_set.warn_legacy_versions_and_specifiers()
+
+        # build wheels
+        build_successes, build_failures = build(
+            reqs_to_build,
+            wheel_cache=wheel_cache,
+            verify=(not options.no_verify),
+            build_options=options.build_options or [],
+
global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError("Failed to build one or more wheels") + + return SUCCESS diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/configuration.py b/myenv/lib/python3.12/site-packages/pip/_internal/configuration.py new file mode 100644 index 0000000..c25273d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/configuration.py @@ -0,0 +1,383 @@ +"""Configuration management setup + +Some terminology: +- name + As written in config files. +- value + Value associated with a name +- key + Name combined with it's section (section.name) +- variant + A single word describing where the configuration key-value pair came from +""" + +import configparser +import locale +import os +import sys +from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple + +from pip._internal.exceptions import ( + ConfigurationError, + ConfigurationFileCouldNotBeLoaded, +) +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ensure_dir, enum + +RawConfigParser = configparser.RawConfigParser # Shorthand +Kind = NewType("Kind", str) + +CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf" +ENV_NAMES_IGNORED = "version", "help" + +# The kinds of configurations there are. +kinds = enum( + USER="user", # User Specific + GLOBAL="global", # System Wide + SITE="site", # [Virtual] Environment Specific + ENV="env", # from PIP_CONFIG_FILE + ENV_VAR="env-var", # from Environment Variables +) +OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR +VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE + +logger = getLogger(__name__) + + +# NOTE: Maybe use the optionx attribute to normalize keynames. +def _normalize_name(name: str) -> str: + """Make a name consistent regardless of source (environment or file)""" + name = name.lower().replace("_", "-") + if name.startswith("--"): + name = name[2:] # only prefer long opts + return name + + +def _disassemble_key(name: str) -> List[str]: + if "." not in name: + error_message = ( + "Key does not contain dot separated section and key. " + f"Perhaps you wanted to use 'global.{name}' instead?" + ) + raise ConfigurationError(error_message) + return name.split(".", 1) + + +def get_configuration_files() -> Dict[Kind, List[str]]: + global_config_files = [ + os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") + ] + + site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) + legacy_config_file = os.path.join( + os.path.expanduser("~"), + "pip" if WINDOWS else ".pip", + CONFIG_BASENAME, + ) + new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) + return { + kinds.GLOBAL: global_config_files, + kinds.SITE: [site_config_file], + kinds.USER: [legacy_config_file, new_config_file], + } + + +class Configuration: + """Handles management of configuration. + + Provides an interface to accessing and managing configuration files. 
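+
+    A minimal usage sketch (the key shown is only an example):
+
+        config = Configuration(isolated=False)
+        config.load()
+        config.get_value("global.index-url")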
+
+    This class provides an API that takes "section.key-name" style
+    keys and stores the value associated with it as "key-name" under the
+    section "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, while the stored data itself stays simple.
+    """
+
+    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
+        super().__init__()
+
+        if load_only is not None and load_only not in VALID_LOAD_ONLY:
+            raise ConfigurationError(
+                "Got invalid value for load_only - should be one of {}".format(
+                    ", ".join(map(repr, VALID_LOAD_ONLY))
+                )
+            )
+        self.isolated = isolated
+        self.load_only = load_only
+
+        # Because we keep track of where we got the data from
+        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
+            variant: [] for variant in OVERRIDE_ORDER
+        }
+        self._config: Dict[Kind, Dict[str, Any]] = {
+            variant: {} for variant in OVERRIDE_ORDER
+        }
+        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
+
+    def load(self) -> None:
+        """Loads configuration from configuration files and environment"""
+        self._load_config_files()
+        if not self.isolated:
+            self._load_environment_vars()
+
+    def get_file_to_edit(self) -> Optional[str]:
+        """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "A file to edit needs to be specified"
+
+        try:
+            return self._get_parser_to_modify()[0]
+        except IndexError:
+            return None
+
+    def items(self) -> Iterable[Tuple[str, Any]]:
+        """Returns key-value pairs like dict.items() representing the loaded
+        configuration
+        """
+        return self._dictionary.items()
+
+    def get_value(self, key: str) -> Any:
+        """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        try:
+            return self._dictionary[key]
+        except KeyError:
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+    def set_value(self, key: str, value: Any) -> None:
+        """Modify a value in the configuration."""
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Modify the parser and the configuration
+            if not parser.has_section(section):
+                parser.add_section(section)
+            parser.set(section, name, value)
+
+        self._config[self.load_only][key] = value
+        self._mark_as_modified(fname, parser)
+
+    def unset_value(self, key: str) -> None:
+        """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        if key not in self._config[self.load_only]:
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+            if not (
+                parser.has_section(section) and parser.remove_option(section, name)
+            ):
+                # The option was not removed.
+                raise ConfigurationError(
+                    "Fatal Internal error [id=1]. Please report as a bug."
+                )
+
+            # The section may be empty after the option was removed.
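+            # e.g. unsetting "global.index-url" when it is the only option
+            # left in [global] also removes the now-empty section (illustrative).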
+            if not parser.items(section):
+                parser.remove_section(section)
+            self._mark_as_modified(fname, parser)
+
+        del self._config[self.load_only][key]
+
+    def save(self) -> None:
+        """Save the current in-memory state."""
+        self._ensure_have_load_only()
+
+        for fname, parser in self._modified_parsers:
+            logger.info("Writing to %s", fname)
+
+            # Ensure directory exists.
+            ensure_dir(os.path.dirname(fname))
+
+            # Ensure the directory is writeable.
+            try:
+                with open(fname, "w") as f:
+                    parser.write(f)
+            except OSError as error:
+                raise ConfigurationError(
+                    f"An error occurred while writing to the configuration file "
+                    f"{fname}: {error}"
+                )
+
+    #
+    # Private routines
+    #
+
+    def _ensure_have_load_only(self) -> None:
+        if self.load_only is None:
+            raise ConfigurationError("Needed a specific file to modify.")
+        logger.debug("Will be working with %s variant only", self.load_only)
+
+    @property
+    def _dictionary(self) -> Dict[str, Any]:
+        """A dictionary representing the loaded configuration."""
+        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+        # are not needed here.
+        retval = {}
+
+        for variant in OVERRIDE_ORDER:
+            retval.update(self._config[variant])
+
+        return retval
+
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
+        config_files = dict(self.iter_config_files())
+        if config_files[kinds.ENV][0:1] == [os.devnull]:
+            logger.debug(
+                "Skipping loading configuration files due to "
+                "environment's PIP_CONFIG_FILE being os.devnull"
+            )
+            return
+
+        for variant, files in config_files.items():
+            for fname in files:
+                # If there's a specific variant set in `load_only`, load only
+                # that variant, not the others.
+                if self.load_only is not None and variant != self.load_only:
+                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
+                    continue
+
+                parser = self._load_file(variant, fname)
+
+                # Keeping track of the parsers used
+                self._parsers[variant].append((fname, parser))
+
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
+        parser = self._construct_parser(fname)
+
+        for section in parser.sections():
+            items = parser.items(section)
+            self._config[variant].update(self._normalized_keys(section, items))
+
+        return parser
+
+    def _construct_parser(self, fname: str) -> RawConfigParser:
+        parser = configparser.RawConfigParser()
+        # If there is no such file, don't bother reading it but create the
+        # parser anyway, to hold the data.
+        # Doing this is useful when modifying and saving files, where we don't
+        # need to construct a parser.
+        if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
+            try:
+                parser.read(fname, encoding=locale_encoding)
+            except UnicodeDecodeError:
+                # See https://github.com/pypa/pip/issues/4963
+                raise ConfigurationFileCouldNotBeLoaded(
+                    reason=f"contains invalid {locale_encoding} characters",
+                    fname=fname,
+                )
+            except configparser.Error as error:
+                # See https://github.com/pypa/pip/issues/4893
+                raise ConfigurationFileCouldNotBeLoaded(error=error)
+        return parser
+
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
+        self._config[kinds.ENV_VAR].update(
+            self._normalized_keys(":env:", self.get_environ_vars())
+        )
+
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
+        """Normalizes items to construct a dictionary with normalized keys.
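+
+        For instance (illustrative), the item ("no-cache-dir", "true") in
+        section "global" becomes {"global.no-cache-dir": "true"}.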
+
+        This routine is where the names become keys and are made the same
+        regardless of source - configuration files or environment.
+        """
+        normalized = {}
+        for name, val in items:
+            key = section + "." + _normalize_name(name)
+            normalized[key] = val
+        return normalized
+
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            if key.startswith("PIP_"):
+                name = key[4:].lower()
+                if name not in ENV_NAMES_IGNORED:
+                    yield name, val
+
+    # XXX: This is patched in the tests.
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
+        """Yields variant and configuration files associated with it.
+
+        This should be treated like items of a dictionary. The order
+        here doesn't affect what gets overridden. That is controlled
+        by OVERRIDE_ORDER. However, this does control the order they are
+        displayed to the user. It's probably most ergonomic to display
+        things in the same order as OVERRIDE_ORDER.
+        """
+        # SMELL: Move the conditions out of this function
+
+        env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
+        config_files = get_configuration_files()
+
+        yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+        # per-user config is not loaded when env_config_file exists
+        should_load_user_config = not self.isolated and not (
+            env_config_file and os.path.exists(env_config_file)
+        )
+        if should_load_user_config:
+            # The legacy config file is overridden by the new config file
+            yield kinds.USER, config_files[kinds.USER]
+
+        # virtualenv config
+        yield kinds.SITE, config_files[kinds.SITE]
+
+        if env_config_file is not None:
+            yield kinds.ENV, [env_config_file]
+        else:
+            yield kinds.ENV, []
+
+    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
+        """Get values present in a config file"""
+        return self._config[variant]
+
+    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
+        # Determine which parser to modify
+        assert self.load_only
+        parsers = self._parsers[self.load_only]
+        if not parsers:
+            # This should not happen if everything works correctly.
+            raise ConfigurationError(
+                "Fatal Internal error [id=2]. Please report as a bug."
+            )
+
+        # Use the highest priority parser.
+        return parsers[-1]
+
+    # XXX: This is patched in the tests.
+    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
+        file_parser_tuple = (fname, parser)
+        if file_parser_tuple not in self._modified_parsers:
+            self._modified_parsers.append(file_parser_tuple)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self._dictionary!r})"
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000..9a89a83
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,21 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(
+    install_req: InstallRequirement,
+) -> AbstractDistribution:
+    """Returns a Distribution for the given InstallRequirement"""
+    # Editable requirements will always be source distributions. They use the
+    # legacy logic until we create a modern standard for them.
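+    # For example (illustrative), `pip install -e ./proj` always takes the
+    # SourceDistribution branch below, even when a built wheel exists.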
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e1dc46c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..4eb2f79 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc new file mode 100644 index 0000000..530fbaa Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc new file mode 100644 index 0000000..564badf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..b17c76f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/base.py b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 0000000..6fb0d7b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,51 @@ +import abc +from typing import Optional + +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata.base import BaseDistribution +from pip._internal.req import InstallRequirement + + +class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. + + - if we need to do work in the build tracker, we must be able to generate a unique + string to identify the requirement in the build tracker. 
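+
+    The concrete implementations in this package are SourceDistribution,
+    WheelDistribution and InstalledDistribution (see the sibling modules).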
+ """ + + def __init__(self, req: InstallRequirement) -> None: + super().__init__() + self.req = req + + @abc.abstractproperty + def build_tracker_id(self) -> Optional[str]: + """A string that uniquely identifies this requirement to the build tracker. + + If None, then this dist has no work to do in the build tracker, and + ``.prepare_distribution_metadata()`` will not be called.""" + raise NotImplementedError() + + @abc.abstractmethod + def get_metadata_distribution(self) -> BaseDistribution: + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata( + self, + finder: PackageFinder, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 0000000..ab8d53b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,29 @@ +from typing import Optional + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. + """ + + @property + def build_tracker_id(self) -> Optional[str]: + return None + + def get_metadata_distribution(self) -> BaseDistribution: + assert self.req.satisfied_by is not None, "not actually installed" + return self.req.satisfied_by + + def prepare_distribution_metadata( + self, + finder: PackageFinder, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 0000000..15ff42b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,156 @@ +import logging +from typing import Iterable, Optional, Set, Tuple + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. 
+ """ + + @property + def build_tracker_id(self) -> Optional[str]: + """Identify this requirement uniquely by its link.""" + assert self.req.link + return self.req.link.url_without_fragment + + def get_metadata_distribution(self) -> BaseDistribution: + return self.req.get_dist() + + def prepare_distribution_metadata( + self, + finder: PackageFinder, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + # Setup an isolated environment and install the build backend static + # requirements in it. + self._prepare_build_backend(finder) + # Check that if the requirement is editable, it either supports PEP 660 or + # has a setup.py or a setup.cfg. This cannot be done earlier because we need + # to setup the build backend to verify it supports build_editable, nor can + # it be done later, because we want to avoid installing build requirements + # needlessly. Doing it here also works around setuptools generating + # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory + # without setup.py nor setup.cfg. + self.req.isolated_editable_sanity_check() + # Install the dynamic build requirements. + self._install_build_reqs(finder) + # Check if the current environment provides build dependencies + should_check_deps = self.req.use_pep517 and check_build_deps + if should_check_deps: + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + conflicting, missing = self.req.build_env.check_requirements( + pyproject_requires + ) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + if missing: + self._raise_missing_reqs(missing) + self.req.prepare_metadata() + + def _prepare_build_backend(self, finder: PackageFinder) -> None: + # Isolate in a BuildEnvironment and install the build-time + # requirements. 
+ pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, "overlay", kind="build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + self._raise_conflicts("PEP 517/518 supported requirements", conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))), + ) + + def _get_build_requires_wheel(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message("Getting requirements to build wheel") + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_wheel() + + def _get_build_requires_editable(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build editable" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_editable() + + def _install_build_reqs(self, finder: PackageFinder) -> None: + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + if ( + self.req.editable + and self.req.permit_editable_wheels + and self.req.supports_pyproject_editable() + ): + build_reqs = self._get_build_requires_editable() + else: + build_reqs = self._get_build_requires_wheel() + conflicting, missing = self.req.build_env.check_requirements(build_reqs) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, "normal", kind="backend dependencies" + ) + + def _raise_conflicts( + self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]] + ) -> None: + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=", ".join( + f"{installed} is incompatible with {wanted}" + for installed, wanted in sorted(conflicting_reqs) + ), + ) + raise InstallationError(error_message) + + def _raise_missing_reqs(self, missing: Set[str]) -> None: + format_string = ( + "Some build dependencies for {requirement} are missing: {missing}." 
+ ) + error_message = format_string.format( + requirement=self.req, missing=", ".join(map(repr, sorted(missing))) + ) + raise InstallationError(error_message) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 0000000..eb16e25 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,40 @@ +from typing import Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + @property + def build_tracker_id(self) -> Optional[str]: + return None + + def get_metadata_distribution(self) -> BaseDistribution: + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + assert self.req.local_file_path, "Set as part of preparation during download" + assert self.req.name, "Wheels are never unnamed" + wheel = FilesystemWheel(self.req.local_file_path) + return get_wheel_distribution(wheel, canonicalize_name(self.req.name)) + + def prepare_distribution_metadata( + self, + finder: PackageFinder, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + pass diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/exceptions.py b/myenv/lib/python3.12/site-packages/pip/_internal/exceptions.py new file mode 100644 index 0000000..5007a62 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,728 @@ +"""Exceptions used throughout package. + +This module MUST NOT try to import from anything within `pip._internal` to +operate. This is expected to be importable from any/all files within the +subpackage and, thus, should not depend on them. +""" + +import configparser +import contextlib +import locale +import logging +import pathlib +import re +import sys +from itertools import chain, groupby, repeat +from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union + +from pip._vendor.requests.models import Request, Response +from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult +from pip._vendor.rich.markup import escape +from pip._vendor.rich.text import Text + +if TYPE_CHECKING: + from hashlib import _Hash + from typing import Literal + + from pip._internal.metadata import BaseDistribution + from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +# +# Scaffolding +# +def _is_kebab_case(s: str) -> bool: + return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None + + +def _prefix_with_indent( + s: Union[Text, str], + console: Console, + *, + prefix: str, + indent: str, +) -> Text: + if isinstance(s, Text): + text = s + else: + text = console.render_str(s) + + return console.render_str(prefix, overflow="ignore") + console.render_str( + f"\n{indent}", overflow="ignore" + ).join(text.split(allow_blank=True)) + + +class PipError(Exception): + """The base pip error.""" + + +class DiagnosticPipError(PipError): + """An error, that presents diagnostic information to the user. 
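+
+    Rendered roughly as follows (colours omitted; the text shown is
+    hypothetical):
+
+        error: example-reference
+
+        × Something went wrong.
+        ╰─> More context about the failure.
+
+        note: An optional note.
+        hint: An optional hint.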
+ + This contains a bunch of logic, to enable pretty presentation of our error + messages. Each error gets a unique reference. Each error can also include + additional context, a hint and/or a note -- which are presented with the + main error message in a consistent style. + + This is adapted from the error output styling in `sphinx-theme-builder`. + """ + + reference: str + + def __init__( + self, + *, + kind: 'Literal["error", "warning"]' = "error", + reference: Optional[str] = None, + message: Union[str, Text], + context: Optional[Union[str, Text]], + hint_stmt: Optional[Union[str, Text]], + note_stmt: Optional[Union[str, Text]] = None, + link: Optional[str] = None, + ) -> None: + # Ensure a proper reference is provided. + if reference is None: + assert hasattr(self, "reference"), "error reference not provided!" + reference = self.reference + assert _is_kebab_case(reference), "error reference must be kebab-case!" + + self.kind = kind + self.reference = reference + + self.message = message + self.context = context + + self.note_stmt = note_stmt + self.hint_stmt = hint_stmt + + self.link = link + + super().__init__(f"<{self.__class__.__name__}: {self.reference}>") + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__}(" + f"reference={self.reference!r}, " + f"message={self.message!r}, " + f"context={self.context!r}, " + f"note_stmt={self.note_stmt!r}, " + f"hint_stmt={self.hint_stmt!r}" + ")>" + ) + + def __rich_console__( + self, + console: Console, + options: ConsoleOptions, + ) -> RenderResult: + colour = "red" if self.kind == "error" else "yellow" + + yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]" + yield "" + + if not options.ascii_only: + # Present the main message, with relevant context indented. + if self.context is not None: + yield _prefix_with_indent( + self.message, + console, + prefix=f"[{colour}]×[/] ", + indent=f"[{colour}]│[/] ", + ) + yield _prefix_with_indent( + self.context, + console, + prefix=f"[{colour}]╰─>[/] ", + indent=f"[{colour}] [/] ", + ) + else: + yield _prefix_with_indent( + self.message, + console, + prefix="[red]×[/] ", + indent=" ", + ) + else: + yield self.message + if self.context is not None: + yield "" + yield self.context + + if self.note_stmt is not None or self.hint_stmt is not None: + yield "" + + if self.note_stmt is not None: + yield _prefix_with_indent( + self.note_stmt, + console, + prefix="[magenta bold]note[/]: ", + indent=" ", + ) + if self.hint_stmt is not None: + yield _prefix_with_indent( + self.hint_stmt, + console, + prefix="[cyan bold]hint[/]: ", + indent=" ", + ) + + if self.link is not None: + yield "" + yield f"Link: {self.link}" + + +# +# Actual Errors +# +class ConfigurationError(PipError): + """General exception in configuration""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class MissingPyProjectBuildRequires(DiagnosticPipError): + """Raised when pyproject.toml has `build-system`, but no `build-system.requires`.""" + + reference = "missing-pyproject-build-system-requires" + + def __init__(self, *, package: str) -> None: + super().__init__( + message=f"Can not process {escape(package)}", + context=Text( + "This package has an invalid pyproject.toml file.\n" + "The [build-system] table is missing the mandatory `requires` key." 
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class NoneMetadataError(PipError):
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
+    """
+
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+        """
+        self.dist = dist
+        self.metadata_name = metadata_name
+
+    def __str__(self) -> str:
+        # Use `dist` in the error message because its stringification
+        # includes more information, like the version and location.
+        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
+
+
+class UserInstallationInvalid(InstallationError):
+    """A --user install is requested on an environment without user site."""
+
+    def __str__(self) -> str:
+        return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+    def __str__(self) -> str:
+        before = ", ".join(str(a) for a in self.args[:-1])
+        return f"Cannot set {before} and {self.args[-1]} together"
+
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+    """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+    """Raised when the most up-to-date version of a package is already
+    installed."""
+
+
+class BadCommand(PipError):
+    """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+    """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+    """Raised when there's a previous conflicting build directory"""
+
+
+class NetworkConnectionError(PipError):
+    """HTTP connection error"""
+
+    def __init__(
+        self,
+        error_msg: str,
+        response: Optional[Response] = None,
+        request: Optional[Request] = None,
+    ) -> None:
+        """
+        Initialize NetworkConnectionError with `request` and `response`
+        objects.
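+
+        If only a response is given, the request is recovered from
+        ``response.request`` when available.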
+ """ + self.response = response + self.request = request + self.error_msg = error_msg + if ( + self.response is not None + and not self.request + and hasattr(response, "request") + ): + self.request = self.response.request + super().__init__(error_msg, response, request) + + def __str__(self) -> str: + return str(self.error_msg) + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class InvalidWheel(InstallationError): + """Invalid (e.g. corrupt) wheel.""" + + def __init__(self, location: str, name: str): + self.location = location + self.name = name + + def __str__(self) -> str: + return f"Wheel '{self.name}' located at {self.location} is invalid." + + +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename, + user-supplied ``#egg=`` value, or an install requirement name. + """ + + def __init__( + self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str + ) -> None: + self.ireq = ireq + self.field = field + self.f_val = f_val + self.m_val = m_val + + def __str__(self) -> str: + return ( + f"Requested {self.ireq} has inconsistent {self.field}: " + f"expected {self.f_val!r}, but metadata has {self.m_val!r}" + ) + + +class InstallationSubprocessError(DiagnosticPipError, InstallationError): + """A subprocess call failed.""" + + reference = "subprocess-exited-with-error" + + def __init__( + self, + *, + command_description: str, + exit_code: int, + output_lines: Optional[List[str]], + ) -> None: + if output_lines is None: + output_prompt = Text("See above for output.") + else: + output_prompt = ( + Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n") + + Text("".join(output_lines)) + + Text.from_markup(R"[red]\[end of output][/]") + ) + + super().__init__( + message=( + f"[green]{escape(command_description)}[/] did not run successfully.\n" + f"exit code: {exit_code}" + ), + context=output_prompt, + hint_stmt=None, + note_stmt=( + "This error originates from a subprocess, and is likely not a " + "problem with pip." 
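+                # Editorial sketch, not part of pip's source: with
+                # output_lines=["error: no compiler\n"] and exit_code=1, the
+                # output_prompt built above becomes the context and would
+                # render roughly as:
+                #
+                #   ╰─> [1 lines of output]
+                #       error: no compiler
+                #       [end of output]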
+ ), + ) + + self.command_description = command_description + self.exit_code = exit_code + + def __str__(self) -> str: + return f"{self.command_description} exited with {self.exit_code}" + + +class MetadataGenerationFailed(InstallationSubprocessError, InstallationError): + reference = "metadata-generation-failed" + + def __init__( + self, + *, + package_details: str, + ) -> None: + super(InstallationSubprocessError, self).__init__( + message="Encountered error while generating package metadata.", + context=escape(package_details), + hint_stmt="See above for details.", + note_stmt="This is an issue with the package mentioned above, not pip.", + ) + + def __str__(self) -> str: + return "metadata generation failed" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self) -> None: + self.errors: List["HashError"] = [] + + def append(self, error: "HashError") -> None: + self.errors.append(error) + + def __str__(self) -> str: + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return "\n".join(lines) + return "" + + def __bool__(self) -> bool: + return bool(self.errors) + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + + req: Optional["InstallRequirement"] = None + head = "" + order: int = -1 + + def body(self) -> str: + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + its link already populated by the resolver's _populate_link(). + + """ + return f" {self._requirement_name()}" + + def __str__(self) -> str: + return f"{self.head}\n{self.body()}" + + def _requirement_name(self) -> str: + """Return a description of the requirement that triggered me. 
+
+        This default implementation returns the long description of the req,
+        with line numbers.
+
+        """
+        return str(self.req) if self.req else "unknown package"
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a file:// requirement that points to a
+    directory, but we don't have a method for hashing those."""
+
+    order = 1
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
+
+    def __init__(self, gotten_hash: str) -> None:
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self) -> str:
+        # Dodge circular import.
+        from pip._internal.utils.hashes import FAVORITE_HASH
+
+        package = None
+        if self.req:
+            # In the case of URL-based requirements, display the original URL
+            # seen in the requirements file rather than the package name,
+            # so the output can be directly copied into the requirements file.
+            package = (
+                self.req.original_link
+                if self.req.is_direct
+                # In case someone feeds something downright stupid
+                # to InstallRequirement's constructor.
+                else getattr(self.req, "req", None)
+            )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version."""
+
+    order = 3
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+        improve its error message.
+
+    """
+
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+    )
+
+    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
+        """
+        :param allowed: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        :param gots: A dict of algorithm names pointing to hashes we
+            actually got from the files under suspicion
+        """
+        self.allowed = allowed
+        self.gots = gots
+
+    def body(self) -> str:
+        return f"    {self._requirement_name()}:\n{self._hash_comparison()}"
+
+    def _hash_comparison(self) -> str:
+        """
+        Return a comparison of actual and expected hash values.
+ + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + + def hash_then_or(hash_name: str) -> "chain[str]": + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(" or")) + + lines: List[str] = [] + for hash_name, expecteds in self.allowed.items(): + prefix = hash_then_or(hash_name) + lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds) + lines.append( + f" Got {self.gots[hash_name].hexdigest()}\n" + ) + return "\n".join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file""" + + def __init__( + self, + reason: str = "could not be loaded", + fname: Optional[str] = None, + error: Optional[configparser.Error] = None, + ) -> None: + super().__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self) -> str: + if self.fname is not None: + message_part = f" in {self.fname}." + else: + assert self.error is not None + message_part = f".\n{self.error}\n" + return f"Configuration file {self.reason}{message_part}" + + +_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\ +The Python environment under {sys.prefix} is managed externally, and may not be +manipulated by the user. Please use specific tooling from the distributor of +the Python installation to interact with this environment instead. +""" + + +class ExternallyManagedEnvironment(DiagnosticPipError): + """The current environment is externally managed. + + This is raised when the current environment is externally managed, as + defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked + and displayed when the error is bubbled up to the user. + + :param error: The error message read from ``EXTERNALLY-MANAGED``. + """ + + reference = "externally-managed-environment" + + def __init__(self, error: Optional[str]) -> None: + if error is None: + context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR) + else: + context = Text(error) + super().__init__( + message="This environment is externally managed", + context=context, + note_stmt=( + "If you believe this is a mistake, please contact your " + "Python installation or OS distribution provider. " + "You can override this, at the risk of breaking your Python " + "installation or OS, by passing --break-system-packages." + ), + hint_stmt=Text("See PEP 668 for the detailed specification."), + ) + + @staticmethod + def _iter_externally_managed_error_keys() -> Iterator[str]: + # LC_MESSAGES is in POSIX, but not the C standard. The most common + # platform that does not implement this category is Windows, where + # using other categories for console message localization is equally + # unreliable, so we fall back to the locale-less vendor message. This + # can always be re-evaluated when a vendor proposes a new alternative. 
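+        # Editorial example, not part of pip's source: with the locale set to
+        # "de_DE", the code below yields "Error-de_DE", then "Error-de" (from
+        # the "_" split), and finally the bare "Error" fallback; from_config()
+        # then uses the first of these keys present in the
+        # [externally-managed] section of the config file.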
+ try: + category = locale.LC_MESSAGES + except AttributeError: + lang: Optional[str] = None + else: + lang, _ = locale.getlocale(category) + if lang is not None: + yield f"Error-{lang}" + for sep in ("-", "_"): + before, found, _ = lang.partition(sep) + if not found: + continue + yield f"Error-{before}" + yield "Error" + + @classmethod + def from_config( + cls, + config: Union[pathlib.Path, str], + ) -> "ExternallyManagedEnvironment": + parser = configparser.ConfigParser(interpolation=None) + try: + parser.read(config, encoding="utf-8") + section = parser["externally-managed"] + for key in cls._iter_externally_managed_error_keys(): + with contextlib.suppress(KeyError): + return cls(section[key]) + except KeyError: + pass + except (OSError, UnicodeDecodeError, configparser.ParsingError): + from pip._internal.utils._log import VERBOSE + + exc_info = logger.isEnabledFor(VERBOSE) + logger.warning("Failed to read %s", config, exc_info=exc_info) + return cls(None) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 0000000..7a17b7b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c31c25d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc new file mode 100644 index 0000000..6e678e0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc new file mode 100644 index 0000000..ec151e3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc new file mode 100644 index 0000000..0df9868 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/collector.py b/myenv/lib/python3.12/site-packages/pip/_internal/index/collector.py new file mode 100644 index 0000000..08c8bdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,507 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_sources(). 
+""" + +import collections +import email.message +import functools +import itertools +import json +import logging +import os +import urllib.parse +import urllib.request +from html.parser import HTMLParser +from optparse import Values +from typing import ( + TYPE_CHECKING, + Callable, + Dict, + Iterable, + List, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +from pip._vendor import requests +from pip._vendor.requests import Response +from pip._vendor.requests.exceptions import RetryError, SSLError + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import redact_auth_from_url +from pip._internal.vcs import vcs + +from .sources import CandidatesFromPage, LinkSource, build_source + +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + +logger = logging.getLogger(__name__) + +ResponseHeaders = MutableMapping[str, str] + + +def _match_vcs_scheme(url: str) -> Optional[str]: + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in "+:": + return scheme + return None + + +class _NotAPIContent(Exception): + def __init__(self, content_type: str, request_desc: str) -> None: + super().__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_api_header(response: Response) -> None: + """ + Check the Content-Type header to ensure the response contains a Simple + API Response. + + Raises `_NotAPIContent` if the content type is not a valid content-type. + """ + content_type = response.headers.get("Content-Type", "Unknown") + + content_type_l = content_type.lower() + if content_type_l.startswith( + ( + "text/html", + "application/vnd.pypi.simple.v1+html", + "application/vnd.pypi.simple.v1+json", + ) + ): + return + + raise _NotAPIContent(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_api_response(url: str, session: PipSession) -> None: + """ + Send a HEAD request to the URL, and ensure the response contains a simple + API Response. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotAPIContent` if the content type is not a valid content type. + """ + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) + if scheme not in {"http", "https"}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_api_header(resp) + + +def _get_simple_response(url: str, session: PipSession) -> Response: + """Access an Simple API response with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML or Simple API, to avoid downloading a + large file. Raise `_NotHTTP` if the content type cannot be determined, or + `_NotAPIContent` if it is not HTML or a Simple API. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got a Simple API response, + and raise `_NotAPIContent` otherwise. 
+    """
+    if is_archive_file(Link(url).filename):
+        _ensure_api_response(url, session=session)
+
+    logger.debug("Getting page %s", redact_auth_from_url(url))
+
+    resp = session.get(
+        url,
+        headers={
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
+            # We don't want to blindly return cached data for
+            # /simple/, because authors are generally expecting that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. Thus by setting this to zero we will not
+            # blindly use any cached data, however the benefit of
+            # using max-age=0 instead of no-cache, is that we will
+            # still support conditional requests, so we will still
+            # minimize traffic sent in cases where the page hasn't
+            # changed at all, we will just always incur the round
+            # trip for the conditional GET now instead of only
+            # once per 10 minutes.
+            # For more information, please see pypa/pip#5670.
+            "Cache-Control": "max-age=0",
+        },
+    )
+    raise_for_status(resp)
+
+    # The check for archives above only works if the url ends with
+    # something that looks like an archive. However that is not a
+    # requirement of a url. Unless we issue a HEAD request on every
+    # url we cannot know ahead of time for sure if something is a
+    # Simple API response or not. However we can check after we've
+    # downloaded it.
+    _ensure_api_header(resp)
+
+    logger.debug(
+        "Fetched page %s as %s",
+        redact_auth_from_url(url),
+        resp.headers.get("Content-Type", "Unknown"),
+    )
+
+    return resp
+
+
+def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
+    """Determine if we have any encoding information in our headers."""
+    if headers and "Content-Type" in headers:
+        m = email.message.Message()
+        m["content-type"] = headers["Content-Type"]
+        charset = m.get_param("charset")
+        if charset:
+            return str(charset)
+    return None
+
+
+class CacheablePageContent:
+    def __init__(self, page: "IndexContent") -> None:
+        assert page.cache_link_parsing
+        self.page = page
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self.page.url == other.page.url
+
+    def __hash__(self) -> int:
+        return hash(self.page.url)
+
+
+class ParseLinks(Protocol):
+    def __call__(self, page: "IndexContent") -> Iterable[Link]:
+        ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
+    """
+    Given a function that parses an Iterable[Link] from an IndexContent, cache the
+    function's result (keyed by CacheablePageContent), unless the IndexContent
+    `page` has `page.cache_link_parsing == False`.
+    """
+
+    @functools.lru_cache(maxsize=None)
+    def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
+        return list(fn(cacheable_page.page))
+
+    @functools.wraps(fn)
+    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
+        if page.cache_link_parsing:
+            return wrapper(CacheablePageContent(page))
+        return list(fn(page))
+
+    return wrapper_wrapper
+
+
+@with_cached_index_content
+def parse_links(page: "IndexContent") -> Iterable[Link]:
+    """
+    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
+ """ + + content_type_l = page.content_type.lower() + if content_type_l.startswith("application/vnd.pypi.simple.v1+json"): + data = json.loads(page.content) + for file in data.get("files", []): + link = Link.from_json(file, page.url) + if link is None: + continue + yield link + return + + parser = HTMLLinkParser(page.url) + encoding = page.encoding or "utf-8" + parser.feed(page.content.decode(encoding)) + + url = page.url + base_url = parser.base_url or url + for anchor in parser.anchors: + link = Link.from_element(anchor, page_url=url, base_url=base_url) + if link is None: + continue + yield link + + +class IndexContent: + """Represents one response (or page), along with its URL""" + + def __init__( + self, + content: bytes, + content_type: str, + encoding: Optional[str], + url: str, + cache_link_parsing: bool = True, + ) -> None: + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.content_type = content_type + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self) -> str: + return redact_auth_from_url(self.url) + + +class HTMLLinkParser(HTMLParser): + """ + HTMLParser that keeps the first base HREF and a list of all anchor + elements' attributes. + """ + + def __init__(self, url: str) -> None: + super().__init__(convert_charrefs=True) + + self.url: str = url + self.base_url: Optional[str] = None + self.anchors: List[Dict[str, Optional[str]]] = [] + + def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: + if tag == "base" and self.base_url is None: + href = self.get_href(attrs) + if href is not None: + self.base_url = href + elif tag == "a": + self.anchors.append(dict(attrs)) + + def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]: + for name, value in attrs: + if name == "href": + return value + return None + + +def _handle_get_simple_fail( + link: Link, + reason: Union[str, Exception], + meth: Optional[Callable[..., None]] = None, +) -> None: + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_index_content( + response: Response, cache_link_parsing: bool = True +) -> IndexContent: + encoding = _get_encoding_from_headers(response.headers) + return IndexContent( + response.content, + response.headers["Content-Type"], + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing, + ) + + +def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]: + url = link.url.split("#", 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. 
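+    # Editorial example, not part of pip's source: a link such as
+    # "git+https://github.com/pypa/sampleproject.git" matches the "git"
+    # scheme in the check below (the character right after the scheme is
+    # "+"), so it is skipped rather than fetched as an index page.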
+ vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning( + "Cannot look at %s URL %s because it does not support lookup as web pages.", + vcs_scheme, + link, + ) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib.parse.urlparse(url) + if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith("/"): + url += "/" + # TODO: In the future, it would be nice if pip supported PEP 691 + # style responses in the file:// URLs, however there's no + # standard file extension for application/vnd.pypi.simple.v1+json + # so we'll need to come up with something on our own. + url = urllib.parse.urljoin(url, "index.html") + logger.debug(" file: URL is directory, getting %s", url) + + try: + resp = _get_simple_response(url, session=session) + except _NotHTTP: + logger.warning( + "Skipping page %s because it looks like an archive, and cannot " + "be checked by a HTTP HEAD request.", + link, + ) + except _NotAPIContent as exc: + logger.warning( + "Skipping page %s because the %s request got Content-Type: %s. " + "The only supported Content-Types are application/vnd.pypi.simple.v1+json, " + "application/vnd.pypi.simple.v1+html, and text/html", + link, + exc.request_desc, + exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_simple_fail(link, exc) + except RetryError as exc: + _handle_get_simple_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_simple_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_simple_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_simple_fail(link, "timed out") + else: + return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing) + return None + + +class CollectedSources(NamedTuple): + find_links: Sequence[Optional[LinkSource]] + index_urls: Sequence[Optional[LinkSource]] + + +class LinkCollector: + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_sources() method. + """ + + def __init__( + self, + session: PipSession, + search_scope: SearchScope, + ) -> None: + self.search_scope = search_scope + self.session = session + + @classmethod + def create( + cls, + session: PipSession, + options: Values, + suppress_no_index: bool = False, + ) -> "LinkCollector": + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + "Ignoring indexes: %s", + ",".join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). 
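+        # Editorial sketch, not part of pip's source: with
+        # options.index_url="https://pypi.org/simple" and
+        # options.find_links=["./wheelhouse"] (hypothetical values), the
+        # SearchScope built below covers both the remote simple index and
+        # the local directory; with --no-index, index_urls was emptied
+        # above and only the find_links locations remain.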
+ find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, + index_urls=index_urls, + no_index=options.no_index, + ) + link_collector = LinkCollector( + session=session, + search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self) -> List[str]: + return self.search_scope.find_links + + def fetch_response(self, location: Link) -> Optional[IndexContent]: + """ + Fetch an HTML page containing package links. + """ + return _get_index_content(location, session=self.session) + + def collect_sources( + self, + project_name: str, + candidates_from_page: CandidatesFromPage, + ) -> CollectedSources: + # The OrderedDict calls deduplicate sources by URL. + index_url_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=False, + cache_link_parsing=False, + project_name=project_name, + ) + for loc in self.search_scope.get_index_urls_locations(project_name) + ).values() + find_links_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=True, + cache_link_parsing=True, + project_name=project_name, + ) + for loc in self.find_links + ).values() + + if logger.isEnabledFor(logging.DEBUG): + lines = [ + f"* {s.link}" + for s in itertools.chain(find_links_sources, index_url_sources) + if s is not None and s.link is not None + ] + lines = [ + f"{len(lines)} location(s) to search " + f"for versions of {project_name}:" + ] + lines + logger.debug("\n".join(lines)) + + return CollectedSources( + find_links=list(find_links_sources), + index_urls=list(index_url_sources), + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py b/myenv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 0000000..ec9ebc3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1027 @@ +"""Routines related to PyPI, indexes""" + +import enum +import functools +import itertools +import logging +import re +from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import LinkCollector, parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.req import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import 
check_requires_python +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS + +if TYPE_CHECKING: + from pip._vendor.typing_extensions import TypeGuard + +__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] + + +logger = getLogger(__name__) + +BuildTag = Union[Tuple[()], Tuple[int, str]] +CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] + + +def _check_link_requires_python( + link: Link, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> bool: + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, + link, + ) + else: + if not is_compatible: + version = ".".join(map(str, version_info)) + if not ignore_requires_python: + logger.verbose( + "Link requires a different Python (%s not in: %r): %s", + version, + link.requires_python, + link, + ) + return False + + logger.debug( + "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", + version, + link.requires_python, + link, + ) + + return True + + +class LinkType(enum.Enum): + candidate = enum.auto() + different_project = enum.auto() + yanked = enum.auto() + format_unsupported = enum.auto() + format_invalid = enum.auto() + platform_mismatch = enum.auto() + requires_python_mismatch = enum.auto() + + +class LinkEvaluator: + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name: str, + canonical_name: str, + formats: FrozenSet[str], + target_python: TargetPython, + allow_yanked: bool, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. 
+ """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link: Link) -> Tuple[LinkType, str]: + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (result, detail), where *result* is an enum + representing whether the evaluation found a candidate, or the reason + why one is not found. If a candidate is found, *detail* will be the + candidate's version string; if one is not found, it contains the + reason the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or "" + return (LinkType.yanked, f"yanked for reason: {reason}") + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (LinkType.format_unsupported, "not a file") + if ext not in SUPPORTED_EXTENSIONS: + return ( + LinkType.format_unsupported, + f"unsupported archive format: {ext}", + ) + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = f"No binaries permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) + if "macosx10" in link.path and ext == ".zip": + return (LinkType.format_unsupported, "macosx10 one") + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return ( + LinkType.format_invalid, + "invalid wheel filename", + ) + if canonicalize_name(wheel.name) != self._canonical_name: + reason = f"wrong project name (not {self.project_name})" + return (LinkType.different_project, reason) + + supported_tags = self._target_python.get_unsorted_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = ", ".join(wheel.get_formatted_file_tags()) + reason = ( + f"none of the wheel's tags ({file_tags}) are compatible " + f"(run pip debug --verbose to show compatible tags)" + ) + return (LinkType.platform_mismatch, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
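+            # Editorial note, not part of pip's source: at this point a file
+            # such as "pkg-1.0.tar.gz" (hypothetical) has fallen through the
+            # wheel branch above with version still None, so the
+            # source-format check below and the fragment-based version
+            # extraction apply to it.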
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = f"No sources permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, + self._canonical_name, + ) + if not version: + reason = f"Missing project version for {self.project_name}" + return (LinkType.format_invalid, reason) + + match = self._py_version_re.search(version) + if match: + version = version[: match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return ( + LinkType.platform_mismatch, + "Python version is incorrect", + ) + + supports_python = _check_link_requires_python( + link, + version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + reason = f"{version} Requires-Python {link.requires_python}" + return (LinkType.requires_python_mismatch, reason) + + logger.debug("Found link %s, version: %s", link, version) + + return (LinkType.candidate, version) + + +def filter_unallowed_hashes( + candidates: List[InstallationCandidate], + hashes: Optional[Hashes], + project_name: str, +) -> List[InstallationCandidate]: + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + "Given no hashes to check %s links for project %r: " + "discarding no candidates", + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = "discarding no candidates" + else: + discard_message = "discarding {} non-matches:\n {}".format( + len(non_matches), + "\n ".join(str(candidate.link) for candidate in non_matches), + ) + + logger.debug( + "Checked %s links for project %r against %s hashes " + "(%s matches, %s no digest): %s", + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message, + ) + + return filtered + + +class CandidatePreferences: + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + ) -> None: + """ + :param allow_all_prereleases: Whether to allow all pre-releases. 
+ """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates: List[InstallationCandidate], + applicable_candidates: List[InstallationCandidate], + best_candidate: Optional[InstallationCandidate], + ) -> None: + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self) -> Iterable[InstallationCandidate]: + """Iterate through all candidates.""" + return iter(self._candidates) + + def iter_applicable(self) -> Iterable[InstallationCandidate]: + """Iterate through the applicable candidates.""" + return iter(self._applicable_candidates) + + +class CandidateEvaluator: + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name: str, + target_python: Optional[TargetPython] = None, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> "CandidateEvaluator": + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_sorted_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name: str, + supported_tags: List[Tag], + specifier: specifiers.BaseSpecifier, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + hashes: Optional[Hashes] = None, + ) -> None: + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + # Since the index of the tag in the _supported_tags list is used + # as a priority, precompute a map from tag to index/priority to be + # used in wheel.find_most_preferred_tag. 
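+        # Editorial example, not part of pip's source: if supported_tags were
+        # [Tag("cp312", "cp312", "manylinux_2_17_x86_64"),
+        #  Tag("py3", "none", "any")], the mapping built below would assign
+        # priority 0 to the first (most preferred) tag and 1 to the second.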
+ self._wheel_tag_preferences = { + tag: idx for idx, tag in enumerate(supported_tags) + } + + def get_applicable_candidates( + self, + candidates: List[InstallationCandidate], + ) -> List[InstallationCandidate]: + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) + for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [c for c in candidates if str(c.version) in versions] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag: BuildTag = () + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + try: + pri = -( + wheel.find_most_preferred_tag( + valid_tags, self._wheel_tag_preferences + ) + ) + except ValueError: + raise UnsupportedWheel( + f"{wheel.filename} is not a supported wheel for this platform. It " + "can't be sorted." + ) + if self._prefer_binary: + binary_preference = 1 + if wheel.build_tag is not None: + match = re.match(r"^(\d+)(.*)$", wheel.build_tag) + assert match is not None, "guaranteed by filename validation" + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. 
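+        # Editorial example, not part of pip's source: given two candidates
+        # for version 1.0 where only one link's hash matches the allowed
+        # hashes, the tuple below makes the matching one sort higher
+        # (has_allowed_hash=1 beats 0) before the yank, binary-preference,
+        # version, tag-priority, or build-tag fields are even compared.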
+ return ( + has_allowed_hash, + yank_value, + binary_preference, + candidate.version, + pri, + build_tag, + ) + + def sort_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> Optional[InstallationCandidate]: + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> BestCandidateResult: + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector: LinkCollector, + target_python: TargetPython, + allow_yanked: bool, + format_control: Optional[FormatControl] = None, + candidate_prefs: Optional[CandidatePreferences] = None, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links: Set[Tuple[Link, LinkType, str]] = set() + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector: LinkCollector, + selection_prefs: SelectionPreferences, + target_python: Optional[TargetPython] = None, + ) -> "PackageFinder": + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self) -> TargetPython: + return self._target_python + + @property + def search_scope(self) -> SearchScope: + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope: SearchScope) -> None: + self._link_collector.search_scope = search_scope + + @property + def find_links(self) -> List[str]: + return self._link_collector.find_links + + @property + def index_urls(self) -> List[str]: + return self.search_scope.index_urls + + @property + def trusted_hosts(self) -> Iterable[str]: + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self) -> bool: + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self) -> None: + self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self) -> bool: + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self) -> None: + self._candidate_prefs.prefer_binary = True + + def requires_python_skipped_reasons(self) -> List[str]: + reasons = { + detail + for _, result, detail in self._logged_links + if result == LinkType.requires_python_mismatch + } + return sorted(reasons) + + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links: Iterable[Link]) -> List[Link]: + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen: Set[Link] = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None: + entry = (link, result, detail) + if entry not in self._logged_links: + # Put the link at the end so the reason is more visible and because + # the link string is usually very long. + logger.debug("Skipping link: %s: %s", detail, link) + self._logged_links.add(entry) + + def get_install_candidate( + self, link_evaluator: LinkEvaluator, link: Link + ) -> Optional[InstallationCandidate]: + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + result, detail = link_evaluator.evaluate_link(link) + if result != LinkType.candidate: + self._log_skipped_link(link, result, detail) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + version=detail, + ) + + def evaluate_links( + self, link_evaluator: LinkEvaluator, links: Iterable[Link] + ) -> List[InstallationCandidate]: + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url( + self, project_url: Link, link_evaluator: LinkEvaluator + ) -> List[InstallationCandidate]: + logger.debug( + "Fetching project page and analyzing links: %s", + project_url, + ) + index_response = self._link_collector.fetch_response(project_url) + if index_response is None: + return [] + + page_links = list(parse_links(index_response)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + @functools.lru_cache(maxsize=None) + def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + link_evaluator = self.make_link_evaluator(project_name) + + collected_sources = self._link_collector.collect_sources( + project_name=project_name, + candidates_from_page=functools.partial( + self.process_project_url, + link_evaluator=link_evaluator, + ), + ) + + page_candidates_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + page_candidates = list(page_candidates_it) + + file_links_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + file_candidates = self.evaluate_links( + link_evaluator, + sorted(file_links_it, reverse=True), + ) + + if logger.isEnabledFor(logging.DEBUG) and file_candidates: + paths = [] + for candidate in file_candidates: + assert candidate.link.url # we need to have a URL + try: + paths.append(candidate.link.file_path) + except Exception: + paths.append(candidate.link.url) # it's not a local file + + logger.debug("Local files found: %s", ", ".join(paths)) + + # This is an intentional priority ordering + return file_candidates + page_candidates + + def make_candidate_evaluator( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> CandidateEvaluator: + """Create a CandidateEvaluator object to use.""" + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + @functools.lru_cache(maxsize=None) + def find_best_candidate( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> BestCandidateResult: + """Find matches for the given project and specifier. 
+
+        :param specifier: An optional object implementing `filter`
+            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+            versions.
+
+        :return: A `BestCandidateResult` instance.
+        """
+        candidates = self.find_all_candidates(project_name)
+        candidate_evaluator = self.make_candidate_evaluator(
+            project_name=project_name,
+            specifier=specifier,
+            hashes=hashes,
+        )
+        return candidate_evaluator.compute_best_candidate(candidates)
+
+    def find_requirement(
+        self, req: InstallRequirement, upgrade: bool
+    ) -> Optional[InstallationCandidate]:
+        """Try to find a Link matching req
+
+        Expects req, an InstallRequirement and upgrade, a boolean
+        Returns an InstallationCandidate if found,
+        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
+        """
+        hashes = req.hashes(trust_internet=False)
+        best_candidate_result = self.find_best_candidate(
+            req.name,
+            specifier=req.specifier,
+            hashes=hashes,
+        )
+        best_candidate = best_candidate_result.best_candidate
+
+        installed_version: Optional[_BaseVersion] = None
+        if req.satisfied_by is not None:
+            installed_version = req.satisfied_by.version
+
+        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
+            # This repeated parse_version and str() conversion is needed to
+            # handle different vendoring sources from pip and pkg_resources.
+            # If we stop using the pkg_resources provided specifier and start
+            # using our own, we can drop the cast to str().
+            return (
+                ", ".join(
+                    sorted(
+                        {str(c.version) for c in cand_iter},
+                        key=parse_version,
+                    )
+                )
+                or "none"
+            )
+
+        if installed_version is None and best_candidate is None:
+            logger.critical(
+                "Could not find a version that satisfies the requirement %s "
+                "(from versions: %s)",
+                req,
+                _format_versions(best_candidate_result.iter_all()),
+            )
+
+            raise DistributionNotFound(f"No matching distribution found for {req}")
+
+        def _should_install_candidate(
+            candidate: Optional[InstallationCandidate],
+        ) -> "TypeGuard[InstallationCandidate]":
+            if installed_version is None:
+                return True
+            if best_candidate is None:
+                return False
+            return best_candidate.version > installed_version
+
+        if not upgrade and installed_version is not None:
+            if _should_install_candidate(best_candidate):
+                logger.debug(
+                    "Existing installed version (%s) satisfies requirement "
+                    "(most up-to-date version is %s)",
+                    installed_version,
+                    best_candidate.version,
+                )
+            else:
+                logger.debug(
+                    "Existing installed version (%s) is most up-to-date and "
+                    "satisfies requirement",
+                    installed_version,
+                )
+            return None
+
+        if _should_install_candidate(best_candidate):
+            logger.debug(
+                "Using version %s (newest of versions: %s)",
+                best_candidate.version,
+                _format_versions(best_candidate_result.iter_applicable()),
+            )
+            return best_candidate
+
+        # We have an existing version, and it's the best version
+        logger.debug(
+            "Installed version (%s) is most up-to-date (past versions: %s)",
+            installed_version,
+            _format_versions(best_candidate_result.iter_applicable()),
+        )
+        raise BestVersionAlreadyInstalled
+
+
+def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
+    """Find the separator's index based on the package's canonical name.
+
+    :param fragment: A <package>+<version> filename "fragment" (stem) or
+        egg fragment.
+    :param canonical_name: The package's canonical name.
+
+    This function is needed since the canonicalized name does not necessarily
+    have the same length as the egg info's name part.
An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. + for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError(f"{fragment} does not match {canonical_name}") + + +def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]: + """Parse the version string from a + filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/index/sources.py b/myenv/lib/python3.12/site-packages/pip/_internal/index/sources.py new file mode 100644 index 0000000..f4626d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/index/sources.py @@ -0,0 +1,285 @@ +import logging +import mimetypes +import os +from collections import defaultdict +from typing import Callable, Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import ( + InvalidSdistFilename, + InvalidVersion, + InvalidWheelFilename, + canonicalize_name, + parse_sdist_filename, + parse_wheel_filename, +) + +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url + +logger = logging.getLogger(__name__) + +FoundCandidates = Iterable[InstallationCandidate] +FoundLinks = Iterable[Link] +CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]] +PageValidator = Callable[[Link], bool] + + +class LinkSource: + @property + def link(self) -> Optional[Link]: + """Returns the underlying link, if there's one.""" + raise NotImplementedError() + + def page_candidates(self) -> FoundCandidates: + """Candidates found by parsing an archive listing HTML file.""" + raise NotImplementedError() + + def file_links(self) -> FoundLinks: + """Links found by specifying archives directly.""" + raise NotImplementedError() + + +def _is_html_file(file_url: str) -> bool: + return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" + + +class _FlatDirectoryToUrls: + """Scans directory and caches results""" + + def __init__(self, path: str) -> None: + self._path = path + self._page_candidates: List[str] = [] + self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list) + self._scanned_directory = False + + def _scan_directory(self) -> None: + """Scans directory once and populates both page_candidates + and project_name_to_urls at the same time + """ + for entry in os.scandir(self._path): + url = path_to_url(entry.path) + if _is_html_file(url): + self._page_candidates.append(url) + continue + + # File must have a valid wheel or sdist name, + # otherwise not worth considering as a package + try: + project_filename = parse_wheel_filename(entry.name)[0] + except (InvalidWheelFilename, InvalidVersion): + try: + project_filename = parse_sdist_filename(entry.name)[0] + 
except (InvalidSdistFilename, InvalidVersion):
+                    continue
+
+            self._project_name_to_urls[project_filename].append(url)
+        self._scanned_directory = True
+
+    @property
+    def page_candidates(self) -> List[str]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._page_candidates
+
+    @property
+    def project_name_to_urls(self) -> Dict[str, List[str]]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._project_name_to_urls
+
+
+class _FlatDirectorySource(LinkSource):
+    """Link source specified by ``--find-links=<path-to-dir>``.
+
+    This looks at the content of the directory, and returns:
+
+    * ``page_candidates``: Links listed on each HTML file in the directory.
+    * ``file_candidates``: Archives in the directory.
+    """
+
+    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        path: str,
+        project_name: str,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._project_name = canonicalize_name(project_name)
+
+        # Get existing instance of _FlatDirectoryToUrls if it exists
+        if path in self._paths_to_urls:
+            self._path_to_urls = self._paths_to_urls[path]
+        else:
+            self._path_to_urls = _FlatDirectoryToUrls(path=path)
+            self._paths_to_urls[path] = self._path_to_urls
+
+    @property
+    def link(self) -> Optional[Link]:
+        return None
+
+    def page_candidates(self) -> FoundCandidates:
+        for url in self._path_to_urls.page_candidates:
+            yield from self._candidates_from_page(Link(url))
+
+    def file_links(self) -> FoundLinks:
+        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
+            yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.
+
+    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+    the option, it is converted to a URL first. This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not _is_html_file(self._link.url):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        if _is_html_file(self._link.url):
+            return
+        yield self._link
+
+
+class _RemoteFileSource(LinkSource):
+    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.
+
+    This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        page_validator: PageValidator,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._page_validator = page_validator
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not self._page_validator(self._link):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        yield self._link
+
+
+class _IndexDirectorySource(LinkSource):
+    """``--[extra-]index-url=<path-to-directory>``.
+
+    This is treated like a remote URL; ``candidates_from_page`` contains logic
+    for this by appending ``index.html`` to the link.
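[Editor's note] Every source above funnels through `_is_html_file` to decide whether a local URL names an index page to parse or an archive to offer directly; the check is just a MIME-type guess. A quick stdlib demonstration (file names invented):

import mimetypes

def _is_html_file(file_url: str) -> bool:
    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"

assert _is_html_file("file:///srv/links/index.html")          # parsed for links
assert not _is_html_file("file:///srv/links/pkg-1.0.tar.gz")  # offered as a file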
+ """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + return () + + +def build_source( + location: str, + *, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + expand_dir: bool, + cache_link_parsing: bool, + project_name: str, +) -> Tuple[Optional[str], Optional[LinkSource]]: + path: Optional[str] = None + url: Optional[str] = None + if os.path.exists(location): # Is a local path. + url = path_to_url(location) + path = location + elif location.startswith("file:"): # A file: URL. + url = location + path = url_to_path(location) + elif is_url(location): + url = location + + if url is None: + msg = ( + "Location '%s' is ignored: " + "it is either a non-existing path or lacks a specific scheme." + ) + logger.warning(msg, location) + return (None, None) + + if path is None: + source: LinkSource = _RemoteFileSource( + candidates_from_page=candidates_from_page, + page_validator=page_validator, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + + if os.path.isdir(path): + if expand_dir: + source = _FlatDirectorySource( + candidates_from_page=candidates_from_page, + path=path, + project_name=project_name, + ) + else: + source = _IndexDirectorySource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + elif os.path.isfile(path): + source = _LocalFileSource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + logger.warning( + "Location '%s' is ignored: it is neither a file nor a directory.", + location, + ) + return (url, None) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py new file mode 100644 index 0000000..d54bc63 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py @@ -0,0 +1,467 @@ +import functools +import logging +import os +import pathlib +import sys +import sysconfig +from typing import Any, Dict, Generator, Optional, Tuple + +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.virtualenv import running_under_virtualenv + +from . import _sysconfig +from .base import ( + USER_CACHE_DIR, + get_major_minor_version, + get_src_prefix, + is_osx_framework, + site_packages, + user_site, +) + +__all__ = [ + "USER_CACHE_DIR", + "get_bin_prefix", + "get_bin_user", + "get_major_minor_version", + "get_platlib", + "get_purelib", + "get_scheme", + "get_src_prefix", + "site_packages", + "user_site", +] + + +logger = logging.getLogger(__name__) + + +_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib") + +_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10) + + +def _should_use_sysconfig() -> bool: + """This function determines the value of _USE_SYSCONFIG. + + By default, pip uses sysconfig on Python 3.10+. 
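[Editor's note] `build_source` above triages each ``--find-links``/index location in three steps: an existing filesystem path, a ``file:`` URL, then any other URL, warning and bailing out otherwise. A rough stdlib-only sketch of the same triage (labels and function name are illustrative; pip's `is_url` is stricter than this bare scheme check):

import os
from urllib.parse import urlparse

def classify_location(location: str) -> str:
    if os.path.exists(location):
        return "local path"
    if location.startswith("file:"):
        return "file: URL (mapped back to a local path)"
    if urlparse(location).scheme:
        return "remote URL"
    return "ignored: neither an existing path nor a URL"

print(classify_location("https://pypi.org/simple/"))  # remote URL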
+
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+    # Import distutils lazily to avoid deprecation warnings,
+    # but import it soon enough that it is in memory and available during
+    # a pip reinstall.
+    from . import _distutils
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+    """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    try:
+        unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+    except KeyError:
+        return False
+    return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
+    platlib = scheme["platlib"]
+    if "/$platlibdir/" in platlib:
+        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+    if "/lib64/" not in platlib:
+        return False
+    unpatched = platlib.replace("/lib64/", "/lib/")
+    return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_lib() -> bool:
+    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+    This is the only way I can see to tell a Red Hat-patched Python.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return all(
+        k in INSTALL_SCHEMES
+        and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+        for k in ("unix_prefix", "unix_home")
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_debian_scheme() -> bool:
+    """Debian adds two additional schemes."""
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_scheme() -> bool:
+    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+    Red Hat's ``00251-change-user-install-location.patch`` changes the install
+    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+    (fortunately?) done quite unconditionally, so we create a default command
+    object without any configuration to detect this.
+    """
+    from distutils.command.install import install
+    from distutils.dist import Distribution
+
+    cmd: Any = install(Distribution())
+    cmd.finalize_options()
+    return (
+        cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+        and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_slackware_scheme() -> bool:
+    """Slackware patches sysconfig but fails to patch distutils and site.
+
+    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+    path, but does not do the same to the site module.
+    """
+    if user_site is None:  # User-site not available.
+        return False
+    try:
+        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+    except KeyError:  # User-site not available.
+ return False + return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site + + +@functools.lru_cache(maxsize=None) +def _looks_like_msys2_mingw_scheme() -> bool: + """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. + + However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is + likely going to be included in their 3.10 release, so we ignore the warning. + See msys2/MINGW-packages#9319. + + MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, + and is missing the final ``"site-packages"``. + """ + paths = sysconfig.get_paths("nt", expand=False) + return all( + "Lib" not in p and "lib" in p and not p.endswith("site-packages") + for p in (paths[key] for key in ("platlib", "purelib")) + ) + + +def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]: + ldversion = sysconfig.get_config_var("LDVERSION") + abiflags = getattr(sys, "abiflags", None) + + # LDVERSION does not end with sys.abiflags. Just return the path unchanged. + if not ldversion or not abiflags or not ldversion.endswith(abiflags): + yield from parts + return + + # Strip sys.abiflags from LDVERSION-based path components. + for part in parts: + if part.endswith(ldversion): + part = part[: (0 - len(abiflags))] + yield part + + +@functools.lru_cache(maxsize=None) +def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None: + issue_url = "https://github.com/pypa/pip/issues/10151" + message = ( + "Value for %s does not match. Please report this to <%s>" + "\ndistutils: %s" + "\nsysconfig: %s" + ) + logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new) + + +def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: + if old == new: + return False + _warn_mismatched(old, new, key=key) + return True + + +@functools.lru_cache(maxsize=None) +def _log_context( + *, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + prefix: Optional[str] = None, +) -> None: + parts = [ + "Additional context:", + "user = %r", + "home = %r", + "root = %r", + "prefix = %r", + ] + + logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix) + + +def get_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: + new = _sysconfig.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + if _USE_SYSCONFIG: + return new + + old = _distutils.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + + warning_contexts = [] + for k in SCHEME_KEYS: + old_v = pathlib.Path(getattr(old, k)) + new_v = pathlib.Path(getattr(new, k)) + + if old_v == new_v: + continue + + # distutils incorrectly put PyPy packages under ``site-packages/python`` + # in the ``posix_home`` scheme, but PyPy devs said they expect the + # directory name to be ``pypy`` instead. So we treat this as a bug fix + # and not warn about it. See bpo-43307 and python/cpython#24628. + skip_pypy_special_case = ( + sys.implementation.name == "pypy" + and home is not None + and k in ("platlib", "purelib") + and old_v.parent == new_v.parent + and old_v.name.startswith("python") + and new_v.name.startswith("pypy") + ) + if skip_pypy_special_case: + continue + + # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in + # the ``include`` value, but distutils's ``headers`` does. 
We'll let
+        # CPython decide whether this is a bug or feature. See bpo-43948.
+        skip_osx_framework_user_special_case = (
+            user
+            and is_osx_framework()
+            and k == "headers"
+            and old_v.parent.parent == new_v.parent
+            and old_v.parent.name.startswith("python")
+        )
+        if skip_osx_framework_user_special_case:
+            continue
+
+        # On Red Hat and derived Linux distributions, distutils is patched to
+        # use "lib64" instead of "lib" for platlib.
+        if k == "platlib" and _looks_like_red_hat_lib():
+            continue
+
+        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+        # sys.platlibdir, but distutils's unix_user incorrectly continues
+        # using the same $usersite for both platlib and purelib. This creates a
+        # mismatch when sys.platlibdir is not "lib".
+        skip_bpo_44860 = (
+            user
+            and k == "platlib"
+            and not WINDOWS
+            and sys.version_info >= (3, 9)
+            and _PLATLIBDIR != "lib"
+            and _looks_like_bpo_44860()
+        )
+        if skip_bpo_44860:
+            continue
+
+        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
+        # but not usersite to match the location.
+        skip_slackware_user_scheme = (
+            user
+            and k in ("platlib", "purelib")
+            and not WINDOWS
+            and _looks_like_slackware_scheme()
+        )
+        if skip_slackware_user_scheme:
+            continue
+
+        # Both Debian and Red Hat patch Python to place the system site under
+        # /usr/local instead of /usr. Debian also places lib in dist-packages
+        # instead of site-packages, but the /usr/local check should cover it.
+        skip_linux_system_special_case = (
+            not (user or home or prefix or running_under_virtualenv())
+            and old_v.parts[1:3] == ("usr", "local")
+            and len(new_v.parts) > 1
+            and new_v.parts[1] == "usr"
+            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
+            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
+        )
+        if skip_linux_system_special_case:
+            continue
+
+        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
+        # the "pythonX.Y" part of the path, but distutils does.
+        skip_sysconfig_abiflag_bug = (
+            sys.version_info < (3, 8)
+            and not WINDOWS
+            and k in ("headers", "platlib", "purelib")
+            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
+        )
+        if skip_sysconfig_abiflag_bug:
+            continue
+
+        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
+        # part of the path. This is incorrect and will be fixed in MSYS.
+        skip_msys2_mingw_bug = (
+            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
+        )
+        if skip_msys2_mingw_bug:
+            continue
+
+        # CPython's POSIX install script invokes pip (via ensurepip) against the
+        # interpreter located in the source tree, not the install site. This
+        # triggers special logic in sysconfig that's not present in distutils.
+        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
+        skip_cpython_build = (
+            sysconfig.is_python_build(check_home=True)
+            and not WINDOWS
+            and k in ("headers", "include", "platinclude")
+        )
+        if skip_cpython_build:
+            continue
+
+        warning_contexts.append((old_v, new_v, f"scheme.{k}"))
+
+    if not warning_contexts:
+        return old
+
+    # Check if this path mismatch is caused by distutils config files. Those
+    # files will no longer work once we switch to sysconfig, so this raises a
+    # deprecation message for them.
+ default_old = _distutils.distutils_scheme( + dist_name, + user, + home, + root, + isolated, + prefix, + ignore_config_files=True, + ) + if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS): + deprecated( + reason=( + "Configuring installation scheme with distutils config files " + "is deprecated and will no longer work in the near future. If you " + "are using a Homebrew or Linuxbrew Python, please see discussion " + "at https://github.com/Homebrew/homebrew-core/issues/76621" + ), + replacement=None, + gone_in=None, + ) + return old + + # Post warnings about this mismatch so user can report them back. + for old_v, new_v, key in warning_contexts: + _warn_mismatched(old_v, new_v, key=key) + _log_context(user=user, home=home, root=root, prefix=prefix) + + return old + + +def get_bin_prefix() -> str: + new = _sysconfig.get_bin_prefix() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_bin_prefix() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): + _log_context() + return old + + +def get_bin_user() -> str: + return _sysconfig.get_scheme("", user=True).scripts + + +def _looks_like_deb_system_dist_packages(value: str) -> bool: + """Check if the value is Debian's APT-controlled dist-packages. + + Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the + default package path controlled by APT, but does not patch ``sysconfig`` to + do the same. This is similar to the bug worked around in ``get_scheme()``, + but here the default is ``deb_system`` instead of ``unix_local``. Ultimately + we can't do anything about this Debian bug, and this detection allows us to + skip the warning when needed. + """ + if not _looks_like_debian_scheme(): + return False + if value == "/usr/lib/python3/dist-packages": + return True + return False + + +def get_purelib() -> str: + """Return the default pure-Python lib location.""" + new = _sysconfig.get_purelib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_purelib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): + _log_context() + return old + + +def get_platlib() -> str: + """Return the default platform-shared lib location.""" + new = _sysconfig.get_platlib() + if _USE_SYSCONFIG: + return new + + from . 
import _distutils + + old = _distutils.get_platlib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): + _log_context() + return old diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..20bdd4b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc new file mode 100644 index 0000000..32b9a2b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc new file mode 100644 index 0000000..51ee7c0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..90a2230 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py b/myenv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py new file mode 100644 index 0000000..0e18c6e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py @@ -0,0 +1,172 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +# If pip's going to use distutils, it should not be using the copy that setuptools +# might have injected into the environment. This is done by removing the injected +# shim, if it's injected. +# +# See https://github.com/pypa/pip/issues/8761 for the original discussion and +# rationale for why this is done within pip. 
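[Editor's note] The ``try``/``except`` that opens the module body below strips setuptools' injected ``_distutils_hack`` shim before anything imports ``distutils``. The guarded dynamic-import idiom it relies on is general; a small sketch with a hypothetical hook module:

# Run an optional hook, tolerating both a missing module (ImportError)
# and a module that lacks the expected attribute (AttributeError).
try:
    __import__("optional_hook_module").remove_shim()  # hypothetical name
except (ImportError, AttributeError):
    pass  # hook absent or incompatible; continue without it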
+try: + __import__("_distutils_hack").remove_shim() +except (ImportError, AttributeError): + pass + +import logging +import os +import sys +from distutils.cmd import Command as DistutilsCommand +from distutils.command.install import SCHEME_KEYS +from distutils.command.install import install as distutils_install_command +from distutils.sysconfig import get_python_lib +from typing import Dict, List, Optional, Union, cast + +from pip._internal.models.scheme import Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + +logger = logging.getLogger(__name__) + + +def distutils_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, + *, + ignore_config_files: bool = False, +) -> Dict[str, str]: + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name} + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + if not ignore_config_files: + try: + d.parse_config_files() + except UnicodeDecodeError: + paths = d.find_config_files() + logger.warning( + "Ignore distutils configs in %s due to encoding errors.", + ", ".join(os.path.basename(p) for p in paths), + ) + obj: Optional[DistutilsCommand] = None + obj = d.get_command_obj("install", create=True) + assert obj is not None + i = cast(distutils_install_command, obj) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), f"user={user} prefix={prefix}" + assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, "install_" + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if "install_lib" in d.get_option_dict("install"): + scheme.update({"purelib": i.install_lib, "platlib": i.install_lib}) + + if running_under_virtualenv(): + if home: + prefix = home + elif user: + prefix = i.install_userbase + else: + prefix = i.prefix + scheme["headers"] = os.path.join( + prefix, + "include", + "site", + f"python{get_major_minor_version()}", + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join(root, path_no_drive[1:]) + + return scheme + + +def get_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. 
The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) + + +def get_bin_prefix() -> str: + # XXX: In old virtualenv versions, sys.prefix can contain '..' components, + # so we need to call normpath to eliminate them. + prefix = os.path.normpath(sys.prefix) + if WINDOWS: + bin_py = os.path.join(prefix, "Scripts") + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(prefix, "bin") + return bin_py + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return os.path.join(prefix, "bin") + + +def get_purelib() -> str: + return get_python_lib(plat_specific=False) + + +def get_platlib() -> str: + return get_python_lib(plat_specific=True) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py b/myenv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py new file mode 100644 index 0000000..97aef1f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py @@ -0,0 +1,213 @@ +import logging +import os +import sys +import sysconfig +import typing + +from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import change_root, get_major_minor_version, is_osx_framework + +logger = logging.getLogger(__name__) + + +# Notes on _infer_* functions. +# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no +# way to ask things like "what is the '_prefix' scheme on this platform". These +# functions try to answer that with some heuristics while accounting for ad-hoc +# platforms not covered by CPython's default sysconfig implementation. If the +# ad-hoc implementation does not fully implement sysconfig, we'll fall back to +# a POSIX scheme. + +_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) + +_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None) + + +def _should_use_osx_framework_prefix() -> bool: + """Check for Apple's ``osx_framework_library`` scheme. + + Python distributed by Apple's Command Line Tools has this special scheme + that's used when: + + * This is a framework build. + * We are installing into the system prefix. 
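[Editor's note] The ``_infer_*`` helpers defined below all probe `sysconfig.get_scheme_names()` for progressively more generic scheme names. The probe order for the prefix case can be reproduced directly; a sketch mirroring `_infer_prefix` minus the Apple and 3.10+ fast paths:

import os
import sys
import sysconfig

schemes = set(sysconfig.get_scheme_names())
for candidate in (
    f"{sys.implementation.name}_{os.name}",  # e.g. "pypy_nt"
    sys.implementation.name,                 # e.g. "pypy"
    f"{os.name}_prefix",                     # e.g. "posix_prefix"
    os.name,                                 # e.g. "nt"
):
    if candidate in schemes:
        print("prefix scheme:", candidate)
        break
else:
    print("prefix scheme: posix_prefix")  # final fallback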
+
+    This does not account for ``pip install --prefix`` (also means we're not
+    installing to the system prefix), which should use ``posix_prefix``, but
+    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+    wouldn't be able to magically switch between ``osx_framework_library`` and
+    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+    means its behavior is consistent whether we use the stdlib implementation
+    or our own, and we deal with this special case in ``get_scheme()`` instead.
+    """
+    return (
+        "osx_framework_library" in _AVAILABLE_SCHEMES
+        and not running_under_virtualenv()
+        and is_osx_framework()
+    )
+
+
+def _infer_prefix() -> str:
+    """Try to find a prefix scheme for the current platform.
+
+    This tries:
+
+    * A special ``osx_framework_library`` for Python distributed by Apple's
+      Command Line Tools, when not running in a virtual environment.
+    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+    * Implementation without OS, used by PyPy on POSIX (``pypy``).
+    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+    * Just the OS name, used by CPython on Windows (``nt``).
+
+    If none of the above works, fall back to ``posix_prefix``.
+    """
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("prefix")
+    if _should_use_osx_framework_prefix():
+        return "osx_framework_library"
+    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+    if implementation_suffixed in _AVAILABLE_SCHEMES:
+        return implementation_suffixed
+    if sys.implementation.name in _AVAILABLE_SCHEMES:
+        return sys.implementation.name
+    suffixed = f"{os.name}_prefix"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+        return os.name
+    return "posix_prefix"
+
+
+def _infer_user() -> str:
+    """Try to find a user scheme for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("user")
+    if is_osx_framework() and not running_under_virtualenv():
+        suffixed = "osx_framework_user"
+    else:
+        suffixed = f"{os.name}_user"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
+        raise UserInstallationInvalid()
+    return "posix_user"
+
+
+def _infer_home() -> str:
+    """Try to find a home for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("home")
+    suffixed = f"{os.name}_home"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+    "installed_base",
+    "base",
+    "installed_platbase",
+    "platbase",
+    "prefix",
+    "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+    _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: typing.Optional[str] = None,
+    root: typing.Optional[str] = None,
+    isolated: bool = False,
+    prefix: typing.Optional[str] = None,
+) -> Scheme:
+    """
+    Get the "scheme" corresponding to the input parameters.
+ + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme + :param root: root under which other directories are re-based + :param isolated: ignored, but kept for distutils compatibility (where + this controls whether the user-site pydistutils.cfg is honored) + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + if user and prefix: + raise InvalidSchemeCombination("--user", "--prefix") + if home and prefix: + raise InvalidSchemeCombination("--home", "--prefix") + + if home is not None: + scheme_name = _infer_home() + elif user: + scheme_name = _infer_user() + else: + scheme_name = _infer_prefix() + + # Special case: When installing into a custom prefix, use posix_prefix + # instead of osx_framework_library. See _should_use_osx_framework_prefix() + # docstring for details. + if prefix is not None and scheme_name == "osx_framework_library": + scheme_name = "posix_prefix" + + if home is not None: + variables = {k: home for k in _HOME_KEYS} + elif prefix is not None: + variables = {k: prefix for k in _HOME_KEYS} + else: + variables = {} + + paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) + + # Logic here is very arbitrary, we're doing it for compatibility, don't ask. + # 1. Pip historically uses a special header path in virtual environments. + # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We + # only do the same when not running in a virtual environment because + # pip's historical header path logic (see point 1) did not do this. + if running_under_virtualenv(): + if user: + base = variables.get("userbase", sys.prefix) + else: + base = variables.get("base", sys.prefix) + python_xy = f"python{get_major_minor_version()}" + paths["include"] = os.path.join(base, "include", "site", python_xy) + elif not dist_name: + dist_name = "UNKNOWN" + + scheme = Scheme( + platlib=paths["platlib"], + purelib=paths["purelib"], + headers=os.path.join(paths["include"], dist_name), + scripts=paths["scripts"], + data=paths["data"], + ) + if root is not None: + for key in SCHEME_KEYS: + value = change_root(root, getattr(scheme, key)) + setattr(scheme, key, value) + return scheme + + +def get_bin_prefix() -> str: + # Forcing to use /usr/local/bin for standard macOS framework installs. 
+ if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return sysconfig.get_paths()["scripts"] + + +def get_purelib() -> str: + return sysconfig.get_paths()["purelib"] + + +def get_platlib() -> str: + return sysconfig.get_paths()["platlib"] diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/locations/base.py b/myenv/lib/python3.12/site-packages/pip/_internal/locations/base.py new file mode 100644 index 0000000..3f9f896 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/locations/base.py @@ -0,0 +1,81 @@ +import functools +import os +import site +import sys +import sysconfig +import typing + +from pip._internal.exceptions import InstallationError +from pip._internal.utils import appdirs +from pip._internal.utils.virtualenv import running_under_virtualenv + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + +# FIXME doesn't account for venv linked to global site-packages +site_packages: str = sysconfig.get_path("purelib") + + +def get_major_minor_version() -> str: + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". + """ + return "{}.{}".format(*sys.version_info) + + +def change_root(new_root: str, pathname: str) -> str: + """Return 'pathname' with 'new_root' prepended. + + If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname). + Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + + This is borrowed from Python's standard library's distutils module. + """ + if os.name == "posix": + if not os.path.isabs(pathname): + return os.path.join(new_root, pathname) + else: + return os.path.join(new_root, pathname[1:]) + + elif os.name == "nt": + (drive, path) = os.path.splitdrive(pathname) + if path[0] == "\\": + path = path[1:] + return os.path.join(new_root, path) + + else: + raise InstallationError( + f"Unknown platform: {os.name}\n" + "Can not change root path prefix on unknown platform." + ) + + +def get_src_prefix() -> str: + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, "src") + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), "src") + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site: typing.Optional[str] = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE + + +@functools.lru_cache(maxsize=None) +def is_osx_framework() -> bool: + return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/main.py b/myenv/lib/python3.12/site-packages/pip/_internal/main.py new file mode 100644 index 0000000..33c6d24 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/main.py @@ -0,0 +1,12 @@ +from typing import List, Optional + + +def main(args: Optional[List[str]] = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py
new file mode 100644
index 0000000..aa232b6
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py
@@ -0,0 +1,128 @@
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast
+
+from pip._internal.utils.misc import strtobool
+
+from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
+
+if TYPE_CHECKING:
+    from typing import Literal, Protocol
+else:
+    Protocol = object
+
+__all__ = [
+    "BaseDistribution",
+    "BaseEnvironment",
+    "FilesystemWheel",
+    "MemoryWheel",
+    "Wheel",
+    "get_default_environment",
+    "get_environment",
+    "get_wheel_distribution",
+    "select_backend",
+]
+
+
+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+
+
+class Backend(Protocol):
+    NAME: 'Literal["importlib", "pkg_resources"]'
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
+def get_default_environment() -> BaseEnvironment:
+    """Get the default representation for the current environment.
+
+    This returns an Environment instance from the chosen backend. The default
+    Environment instance should be built from ``sys.path`` and may use caching
+    to share instance state across calls.
+    """
+    return select_backend().Environment.default()
+
+
+def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
+    """Get a representation of the environment specified by ``paths``.
+
+    This returns an Environment instance from the chosen backend based on the
+    given import paths. The backend must build a fresh instance representing
+    the state of installed distributions when this function is called.
+    """
+    return select_backend().Environment.from_paths(paths)
+
+
+def get_directory_distribution(directory: str) -> BaseDistribution:
+    """Get the distribution metadata representation in the specified directory.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given on-disk ``.dist-info`` directory.
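[Editor's note] `_should_use_importlib_metadata` above lets the ``_PIP_USE_IMPORTLIB_METADATA`` environment variable short-circuit the version check through pip's ``strtobool`` helper. A self-contained sketch of the same override pattern (the local ``strtobool`` mirrors the old ``distutils.util.strtobool`` truth table, which is what pip's helper reimplements):

import contextlib
import os
import sys

def strtobool(val: str) -> int:
    # Truth table of the old distutils.util.strtobool.
    val = val.lower()
    if val in ("y", "yes", "t", "true", "on", "1"):
        return 1
    if val in ("n", "no", "f", "false", "off", "0"):
        return 0
    raise ValueError(f"invalid truth value {val!r}")

def use_importlib_metadata() -> bool:
    # Env var wins if present and parseable; otherwise fall back to the
    # Python-version default.
    with contextlib.suppress(KeyError, ValueError):
        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
    return sys.version_info >= (3, 11)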
+ """ + return select_backend().Distribution.from_directory(directory) + + +def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution: + """Get the representation of the specified wheel's distribution metadata. + + This returns a Distribution instance from the chosen backend based on + the given wheel's ``.dist-info`` directory. + + :param canonical_name: Normalized project name of the given wheel. + """ + return select_backend().Distribution.from_wheel(wheel, canonical_name) + + +def get_metadata_distribution( + metadata_contents: bytes, + filename: str, + canonical_name: str, +) -> BaseDistribution: + """Get the dist representation of the specified METADATA file contents. + + This returns a Distribution instance from the chosen backend sourced from the data + in `metadata_contents`. + + :param metadata_contents: Contents of a METADATA file within a dist, or one served + via PEP 658. + :param filename: Filename for the dist this metadata represents. + :param canonical_name: Normalized project name of the given dist. + """ + return select_backend().Distribution.from_metadata_file_contents( + metadata_contents, + filename, + canonical_name, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..63bba47 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc new file mode 100644 index 0000000..3f77a47 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..163198f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc new file mode 100644 index 0000000..de29988 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py new file mode 100644 index 0000000..27362fc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py @@ -0,0 +1,84 @@ +# Extracted from https://github.com/pfmoore/pkg_metadata + +from email.header import Header, decode_header, make_header +from email.message import Message +from typing import Any, Dict, List, Union + +METADATA_FIELDS = [ + # Name, Multiple-Use + ("Metadata-Version", False), + ("Name", False), + ("Version", False), + ("Dynamic", True), + ("Platform", True), + ("Supported-Platform", True), + ("Summary", False), + ("Description", False), + ("Description-Content-Type", False), + ("Keywords", False), + ("Home-page", False), + ("Download-URL", False), + 
("Author", False), + ("Author-email", False), + ("Maintainer", False), + ("Maintainer-email", False), + ("License", False), + ("Classifier", True), + ("Requires-Dist", True), + ("Requires-Python", False), + ("Requires-External", True), + ("Project-URL", True), + ("Provides-Extra", True), + ("Provides-Dist", True), + ("Obsoletes-Dist", True), +] + + +def json_name(field: str) -> str: + return field.lower().replace("-", "_") + + +def msg_to_json(msg: Message) -> Dict[str, Any]: + """Convert a Message object into a JSON-compatible dictionary.""" + + def sanitise_header(h: Union[Header, str]) -> str: + if isinstance(h, Header): + chunks = [] + for bytes, encoding in decode_header(h): + if encoding == "unknown-8bit": + try: + # See if UTF-8 works + bytes.decode("utf-8") + encoding = "utf-8" + except UnicodeDecodeError: + # If not, latin1 at least won't fail + encoding = "latin1" + chunks.append((bytes, encoding)) + return str(make_header(chunks)) + return str(h) + + result = {} + for field, multi in METADATA_FIELDS: + if field not in msg: + continue + key = json_name(field) + if multi: + value: Union[str, List[str]] = [ + sanitise_header(v) for v in msg.get_all(field) # type: ignore + ] + else: + value = sanitise_header(msg.get(field)) # type: ignore + if key == "keywords": + # Accept both comma-separated and space-separated + # forms, for better compatibility with old data. + if "," in value: + value = [v.strip() for v in value.split(",")] + else: + value = value.split() + result[key] = value + + payload = msg.get_payload() + if payload: + result["description"] = payload + + return result diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/base.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/base.py new file mode 100644 index 0000000..9249124 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/base.py @@ -0,0 +1,702 @@ +import csv +import email.message +import functools +import json +import logging +import pathlib +import re +import zipfile +from typing import ( + IO, + TYPE_CHECKING, + Any, + Collection, + Container, + Dict, + Iterable, + Iterator, + List, + NamedTuple, + Optional, + Tuple, + Union, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.exceptions import NoneMetadataError +from pip._internal.locations import site_packages, user_site +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + DirectUrl, + DirectUrlValidationError, +) +from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. 
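[Editor's note] `msg_to_json` above consumes an email-style message as produced by the stdlib parser, treating multiple-use fields as lists and splitting ``Keywords``. A small end-to-end illustration (metadata content is made up):

from email import message_from_string

metadata = message_from_string(
    "Metadata-Version: 2.1\n"
    "Name: demo\n"
    "Version: 1.0\n"
    "Classifier: Programming Language :: Python\n"
    "Classifier: License :: OSI Approved :: MIT License\n"
    "Keywords: packaging,metadata\n"
)
print(metadata.get_all("Classifier"))       # multiple-use field -> list of two
print(metadata["Keywords"].split(","))      # comma-separated keywords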
+from pip._internal.utils.egg_link import egg_link_path_from_sys_path +from pip._internal.utils.misc import is_local, normalize_path +from pip._internal.utils.urls import url_to_path + +from ._json import msg_to_json + +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + +DistributionVersion = Union[LegacyVersion, Version] + +InfoPath = Union[str, pathlib.PurePath] + +logger = logging.getLogger(__name__) + + +class BaseEntryPoint(Protocol): + @property + def name(self) -> str: + raise NotImplementedError() + + @property + def value(self) -> str: + raise NotImplementedError() + + @property + def group(self) -> str: + raise NotImplementedError() + + +def _convert_installed_files_path( + entry: Tuple[str, ...], + info: Tuple[str, ...], +) -> str: + """Convert a legacy installed-files.txt path into modern RECORD path. + + The legacy format stores paths relative to the info directory, while the + modern format stores paths relative to the package root, e.g. the + site-packages directory. + + :param entry: Path parts of the installed-files.txt entry. + :param info: Path parts of the egg-info directory relative to package root. + :returns: The converted entry. + + For best compatibility with symlinks, this does not use ``abspath()`` or + ``Path.resolve()``, but tries to work with path parts: + + 1. While ``entry`` starts with ``..``, remove the equal amounts of parts + from ``info``; if ``info`` is empty, start appending ``..`` instead. + 2. Join the two directly. + """ + while entry and entry[0] == "..": + if not info or info[-1] == "..": + info += ("..",) + else: + info = info[:-1] + entry = entry[1:] + return str(pathlib.Path(*info, *entry)) + + +class RequiresEntry(NamedTuple): + requirement: str + extra: str + marker: str + + +class BaseDistribution(Protocol): + @classmethod + def from_directory(cls, directory: str) -> "BaseDistribution": + """Load the distribution from a metadata directory. + + :param directory: Path to a metadata directory, e.g. ``.dist-info``. + """ + raise NotImplementedError() + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> "BaseDistribution": + """Load the distribution from the contents of a METADATA file. + + This is used to implement PEP 658 by generating a "shallow" dist object that can + be used for resolution without downloading or building the actual dist yet. + + :param metadata_contents: The contents of a METADATA file. + :param filename: File name for the dist with this metadata. + :param project_name: Name of the project this dist represents. + """ + raise NotImplementedError() + + @classmethod + def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution": + """Load the distribution from a given wheel. + + :param wheel: A concrete wheel definition. + :param name: File name of the wheel. + + :raises InvalidWheel: Whenever loading of the wheel causes a + :py:exc:`zipfile.BadZipFile` exception to be thrown. + :raises UnsupportedWheel: If the wheel is a valid zip, but malformed + internally. + """ + raise NotImplementedError() + + def __repr__(self) -> str: + return f"{self.raw_name} {self.version} ({self.location})" + + def __str__(self) -> str: + return f"{self.raw_name} {self.version}" + + @property + def location(self) -> Optional[str]: + """Where the distribution is loaded from. + + A string value is not necessarily a filesystem path, since distributions + can be loaded from other sources, e.g. arbitrary zip archives. 
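[Editor's note] `_convert_installed_files_path` above rewrites legacy ``installed-files.txt`` entries (recorded relative to the ``.egg-info`` directory) into RECORD-style paths relative to the package root, by cancelling leading ``..`` parts against the info directory's own parts. A usage example with the function as defined above (paths are illustrative; the separator is os-dependent on Windows):

# "../demo/__init__.py" recorded inside "demo-1.0.egg-info" cancels one
# ".." against the info directory and yields "demo/__init__.py".
print(_convert_installed_files_path(
    ("..", "demo", "__init__.py"),
    ("demo-1.0.egg-info",),
))  # -> demo/__init__.py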
``None``
+        means the distribution is created in-memory.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def editable_project_location(self) -> Optional[str]:
+        """The project location for editable distributions.
+
+        This is the directory where pyproject.toml or setup.py is located.
+        None if the distribution is not installed in editable mode.
+        """
+        # TODO: this property is relatively costly to compute, memoize it?
+        direct_url = self.direct_url
+        if direct_url:
+            if direct_url.is_local_editable():
+                return url_to_path(direct_url.url)
+        else:
+            # Search for an .egg-link file by walking sys.path, as it was
+            # done before by dist_is_editable().
+            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
+            if egg_link_path:
+                # TODO: get project location from second line of egg_link file
+                #       (https://github.com/pypa/pip/issues/10243)
+                return self.location
+        return None
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        """The distribution's "installed" location.
+
+        This should generally be a ``site-packages`` directory. This is
+        usually ``dist.location``, except for legacy develop-installed packages,
+        where ``dist.location`` is the source code location, and this is where
+        the ``.egg-link`` file is.
+
+        The returned location is normalized (in particular, with symlinks removed).
+        """
+        raise NotImplementedError()
+
+    @property
+    def info_location(self) -> Optional[str]:
+        """Location of the .[egg|dist]-info directory or file.
+
+        Similarly to ``location``, a string value is not necessarily a
+        filesystem path. ``None`` means the distribution is created in-memory.
+
+        For a modern .dist-info installation on disk, this should be something
+        like ``{location}/{raw_name}-{version}.dist-info``.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and other files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        """Whether this distribution is installed with legacy distutils format.
+
+        A distribution installed with "raw" distutils not patched by setuptools
+        uses one single file at ``info_location`` to store metadata. We need to
+        treat this specially on uninstallation.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        return pathlib.Path(info_location).is_file()
+
+    @property
+    def installed_as_egg(self) -> bool:
+        """Whether this distribution is installed as an egg.
+
+        This usually indicates the distribution was installed by (older versions
+        of) easy_install.
+        """
+        location = self.location
+        if not location:
+            return False
+        return location.endswith(".egg")
+
+    @property
+    def installed_with_setuptools_egg_info(self) -> bool:
+        """Whether this distribution is installed with the ``.egg-info`` format.
+
+        This usually indicates the distribution was installed with setuptools
+        with an old pip version or with ``single-version-externally-managed``.
+
+        Note that this ensures the metadata store is a directory. distutils can
+        also install an ``.egg-info``, but as a file, not a directory. This
+        property is *False* for that case. Also see ``installed_by_distutils``.
+ """ + info_location = self.info_location + if not info_location: + return False + if not info_location.endswith(".egg-info"): + return False + return pathlib.Path(info_location).is_dir() + + @property + def installed_with_dist_info(self) -> bool: + """Whether this distribution is installed with the "modern format". + + This indicates a "modern" installation, e.g. storing metadata in the + ``.dist-info`` directory. This applies to installations made by + setuptools (but through pip, not directly), or anything using the + standardized build backend interface (PEP 517). + """ + info_location = self.info_location + if not info_location: + return False + if not info_location.endswith(".dist-info"): + return False + return pathlib.Path(info_location).is_dir() + + @property + def canonical_name(self) -> NormalizedName: + raise NotImplementedError() + + @property + def version(self) -> DistributionVersion: + raise NotImplementedError() + + @property + def setuptools_filename(self) -> str: + """Convert a project name to its setuptools-compatible filename. + + This is a copy of ``pkg_resources.to_filename()`` for compatibility. + """ + return self.raw_name.replace("-", "_") + + @property + def direct_url(self) -> Optional[DirectUrl]: + """Obtain a DirectUrl from this distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. + """ + try: + content = self.read_text(DIRECT_URL_METADATA_NAME) + except FileNotFoundError: + return None + try: + return DirectUrl.from_json(content) + except ( + UnicodeDecodeError, + json.JSONDecodeError, + DirectUrlValidationError, + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + self.canonical_name, + e, + ) + return None + + @property + def installer(self) -> str: + try: + installer_text = self.read_text("INSTALLER") + except (OSError, ValueError, NoneMetadataError): + return "" # Fail silently if the installer file cannot be read. + for line in installer_text.splitlines(): + cleaned_line = line.strip() + if cleaned_line: + return cleaned_line + return "" + + @property + def requested(self) -> bool: + return self.is_file("REQUESTED") + + @property + def editable(self) -> bool: + return bool(self.editable_project_location) + + @property + def local(self) -> bool: + """If distribution is installed in the current virtual environment. + + Always True if we're not in a virtualenv. + """ + if self.installed_location is None: + return False + return is_local(self.installed_location) + + @property + def in_usersite(self) -> bool: + if self.installed_location is None or user_site is None: + return False + return self.installed_location.startswith(normalize_path(user_site)) + + @property + def in_site_packages(self) -> bool: + if self.installed_location is None or site_packages is None: + return False + return self.installed_location.startswith(normalize_path(site_packages)) + + def is_file(self, path: InfoPath) -> bool: + """Check whether an entry in the info directory is a file.""" + raise NotImplementedError() + + def iter_distutils_script_names(self) -> Iterator[str]: + """Find distutils 'scripts' entries metadata. + + If 'scripts' is supplied in ``setup.py``, distutils records those in the + installed distribution's ``scripts`` directory, a file for each script. + """ + raise NotImplementedError() + + def read_text(self, path: InfoPath) -> str: + """Read a file in the info directory. + + :raise FileNotFoundError: If ``path`` does not exist in the directory. 
+ :raise NoneMetadataError: If ``path`` exists in the info directory, but + cannot be read. + """ + raise NotImplementedError() + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + raise NotImplementedError() + + def _metadata_impl(self) -> email.message.Message: + raise NotImplementedError() + + @functools.lru_cache(maxsize=1) + def _metadata_cached(self) -> email.message.Message: + # When we drop python 3.7 support, move this to the metadata property and use + # functools.cached_property instead of lru_cache. + metadata = self._metadata_impl() + self._add_egg_info_requires(metadata) + return metadata + + @property + def metadata(self) -> email.message.Message: + """Metadata of distribution parsed from e.g. METADATA or PKG-INFO. + + This should return an empty message if the metadata file is unavailable. + + :raises NoneMetadataError: If the metadata file is available, but does + not contain valid metadata. + """ + return self._metadata_cached() + + @property + def metadata_dict(self) -> Dict[str, Any]: + """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO. + + This should return an empty dict if the metadata file is unavailable. + + :raises NoneMetadataError: If the metadata file is available, but does + not contain valid metadata. + """ + return msg_to_json(self.metadata) + + @property + def metadata_version(self) -> Optional[str]: + """Value of "Metadata-Version:" in distribution metadata, if available.""" + return self.metadata.get("Metadata-Version") + + @property + def raw_name(self) -> str: + """Value of "Name:" in distribution metadata.""" + # The metadata should NEVER be missing the Name: key, but if it somehow + # does, fall back to the known canonical name. + return self.metadata.get("Name", self.canonical_name) + + @property + def requires_python(self) -> SpecifierSet: + """Value of "Requires-Python:" in distribution metadata. + + If the key does not exist or contains an invalid value, an empty + SpecifierSet should be returned. + """ + value = self.metadata.get("Requires-Python") + if value is None: + return SpecifierSet() + try: + # Convert to str to satisfy the type checker; this can be a Header object. + spec = SpecifierSet(str(value)) + except InvalidSpecifier as e: + message = "Package %r has an invalid Requires-Python: %s" + logger.warning(message, self.raw_name, e) + return SpecifierSet() + return spec + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + """Dependencies of this distribution. + + For modern .dist-info distributions, this is the collection of + "Requires-Dist:" entries in distribution metadata. + """ + raise NotImplementedError() + + def iter_provided_extras(self) -> Iterable[str]: + """Extras provided by this distribution. + + For modern .dist-info distributions, this is the collection of + "Provides-Extra:" entries in distribution metadata. + + The return value of this function is not particularly useful other than + display purposes due to backward compatibility issues and the extra + names being poorly normalized prior to PEP 685. If you want to perform + logic operations on extras, use :func:`is_extra_provided` instead. + """ + raise NotImplementedError() + + def is_extra_provided(self, extra: str) -> bool: + """Check whether an extra is provided by this distribution. + + This is needed mostly for compatibility issues with pkg_resources not + following the extra normalization rules defined in PEP 685. 
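+
+        For example, under PEP 685 normalization the (hypothetical) extra
+        names ``Frozen_Sandwich`` and ``frozen-sandwich`` should compare
+        equal, even though a naive string comparison would not treat them so.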
+ """ + raise NotImplementedError() + + def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]: + try: + text = self.read_text("RECORD") + except FileNotFoundError: + return None + # This extra Path-str cast normalizes entries. + return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) + + def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]: + try: + text = self.read_text("installed-files.txt") + except FileNotFoundError: + return None + paths = (p for p in text.splitlines(keepends=False) if p) + root = self.location + info = self.info_location + if root is None or info is None: + return paths + try: + info_rel = pathlib.Path(info).relative_to(root) + except ValueError: # info is not relative to root. + return paths + if not info_rel.parts: # info *is* root. + return paths + return ( + _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts) + for p in paths + ) + + def iter_declared_entries(self) -> Optional[Iterator[str]]: + """Iterate through file entries declared in this distribution. + + For modern .dist-info distributions, this is the files listed in the + ``RECORD`` metadata file. For legacy setuptools distributions, this + comes from ``installed-files.txt``, with entries normalized to be + compatible with the format used by ``RECORD``. + + :return: An iterator for listed entries, or None if the distribution + contains neither ``RECORD`` nor ``installed-files.txt``. + """ + return ( + self._iter_declared_entries_from_record() + or self._iter_declared_entries_from_legacy() + ) + + def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]: + """Parse a ``requires.txt`` in an egg-info directory. + + This is an INI-ish format where an egg-info stores dependencies. A + section name describes extra other environment markers, while each entry + is an arbitrary string (not a key-value pair) representing a dependency + as a requirement string (no markers). + + There is a construct in ``importlib.metadata`` called ``Sectioned`` that + does mostly the same, but the format is currently considered private. + """ + try: + content = self.read_text("requires.txt") + except FileNotFoundError: + return + extra = marker = "" # Section-less entries don't have markers. + for line in content.splitlines(): + line = line.strip() + if not line or line.startswith("#"): # Comment; ignored. + continue + if line.startswith("[") and line.endswith("]"): # A section header. + extra, _, marker = line.strip("[]").partition(":") + continue + yield RequiresEntry(requirement=line, extra=extra, marker=marker) + + def _iter_egg_info_extras(self) -> Iterable[str]: + """Get extras from the egg-info directory.""" + known_extras = {""} + for entry in self._iter_requires_txt_entries(): + extra = canonicalize_name(entry.extra) + if extra in known_extras: + continue + known_extras.add(extra) + yield extra + + def _iter_egg_info_dependencies(self) -> Iterable[str]: + """Get distribution dependencies from the egg-info directory. + + To ease parsing, this converts a legacy dependency entry into a PEP 508 + requirement string. Like ``_iter_requires_txt_entries()``, there is code + in ``importlib.metadata`` that does mostly the same, but not do exactly + what we need. + + Namely, ``importlib.metadata`` does not normalize the extra name before + putting it into the requirement string, which causes marker comparison + to fail because the dist-info format do normalize. This is consistent in + all currently available PEP 517 backends, although not standardized. 
+ """ + for entry in self._iter_requires_txt_entries(): + extra = canonicalize_name(entry.extra) + if extra and entry.marker: + marker = f'({entry.marker}) and extra == "{extra}"' + elif extra: + marker = f'extra == "{extra}"' + elif entry.marker: + marker = entry.marker + else: + marker = "" + if marker: + yield f"{entry.requirement} ; {marker}" + else: + yield entry.requirement + + def _add_egg_info_requires(self, metadata: email.message.Message) -> None: + """Add egg-info requires.txt information to the metadata.""" + if not metadata.get_all("Requires-Dist"): + for dep in self._iter_egg_info_dependencies(): + metadata["Requires-Dist"] = dep + if not metadata.get_all("Provides-Extra"): + for extra in self._iter_egg_info_extras(): + metadata["Provides-Extra"] = extra + + +class BaseEnvironment: + """An environment containing distributions to introspect.""" + + @classmethod + def default(cls) -> "BaseEnvironment": + raise NotImplementedError() + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment": + raise NotImplementedError() + + def get_distribution(self, name: str) -> Optional["BaseDistribution"]: + """Given a requirement name, return the installed distributions. + + The name may not be normalized. The implementation must canonicalize + it for lookup. + """ + raise NotImplementedError() + + def _iter_distributions(self) -> Iterator["BaseDistribution"]: + """Iterate through installed distributions. + + This function should be implemented by subclass, but never called + directly. Use the public ``iter_distribution()`` instead, which + implements additional logic to make sure the distributions are valid. + """ + raise NotImplementedError() + + def iter_all_distributions(self) -> Iterator[BaseDistribution]: + """Iterate through all installed distributions without any filtering.""" + for dist in self._iter_distributions(): + # Make sure the distribution actually comes from a valid Python + # packaging distribution. Pip's AdjacentTempDirectory leaves folders + # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The + # valid project name pattern is taken from PEP 508. + project_name_valid = re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + dist.canonical_name, + flags=re.IGNORECASE, + ) + if not project_name_valid: + logger.warning( + "Ignoring invalid distribution %s (%s)", + dist.canonical_name, + dist.location, + ) + continue + yield dist + + def iter_installed_distributions( + self, + local_only: bool = True, + skip: Container[str] = stdlib_pkgs, + include_editables: bool = True, + editables_only: bool = False, + user_only: bool = False, + ) -> Iterator[BaseDistribution]: + """Return a list of installed distributions. + + This is based on ``iter_all_distributions()`` with additional filtering + options. Note that ``iter_installed_distributions()`` without arguments + is *not* equal to ``iter_all_distributions()``, since some of the + configurations exclude packages by default. + + :param local_only: If True (default), only return installations + local to the current virtualenv, if in a virtualenv. + :param skip: An iterable of canonicalized project names to ignore; + defaults to ``stdlib_pkgs``. + :param include_editables: If False, don't report editables. + :param editables_only: If True, only report editables. + :param user_only: If True, only report installations in the user + site directory. 
+ """ + it = self.iter_all_distributions() + if local_only: + it = (d for d in it if d.local) + if not include_editables: + it = (d for d in it if not d.editable) + if editables_only: + it = (d for d in it if d.editable) + if user_only: + it = (d for d in it if d.in_usersite) + return (d for d in it if d.canonical_name not in skip) + + +class Wheel(Protocol): + location: str + + def as_zipfile(self) -> zipfile.ZipFile: + raise NotImplementedError() + + +class FilesystemWheel(Wheel): + def __init__(self, location: str) -> None: + self.location = location + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.location, allowZip64=True) + + +class MemoryWheel(Wheel): + def __init__(self, location: str, stream: IO[bytes]) -> None: + self.location = location + self.stream = stream + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.stream, allowZip64=True) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py new file mode 100644 index 0000000..a779138 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py @@ -0,0 +1,6 @@ +from ._dists import Distribution +from ._envs import Environment + +__all__ = ["NAME", "Distribution", "Environment"] + +NAME = "importlib" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1e37aa4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..5e66bb7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc new file mode 100644 index 0000000..5633f42 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc new file mode 100644 index 0000000..6a2ecb2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py new file mode 100644 index 0000000..593bff2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py @@ -0,0 +1,55 @@ +import importlib.metadata +from typing import Any, Optional, Protocol, cast + + +class BadMetadata(ValueError): + def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None: + self.dist = dist 
+        self.reason = reason
+
+    def __str__(self) -> str:
+        return f"Bad metadata in {self.dist} ({self.reason})"
+
+
+class BasePath(Protocol):
+    """A protocol that various path objects conform to.
+
+    This exists because importlib.metadata uses both ``pathlib.Path`` and
+    ``zipfile.Path``, and we need a common base for type hints (Union does not
+    work well since ``zipfile.Path`` is too new for our linter setup).
+
+    This is not meant to be exhaustive, but only contains things that are
+    present in both classes *that we need*.
+    """
+
+    @property
+    def name(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def parent(self) -> "BasePath":
+        raise NotImplementedError()
+
+
+def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
+    """Find the path to the distribution's metadata directory.
+
+    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
+    all distributions exist on disk, so importlib.metadata is correct to not
+    expose the attribute as public. But pip's code base is old and not as
+    clean, so we do this to avoid having to rewrite too many things. Hopefully
+    we can eliminate this some day.
+    """
+    return getattr(d, "_path", None)
+
+
+def get_dist_name(dist: importlib.metadata.Distribution) -> str:
+    """Get the distribution's project name.
+
+    The ``name`` attribute is only available in Python 3.10 or later. We are
+    targeting exactly that, but Mypy does not know this.
+    """
+    name = cast(Any, dist).name
+    if not isinstance(name, str):
+        raise BadMetadata(dist, reason="invalid metadata entry 'name'")
+    return name
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py
new file mode 100644
index 0000000..26370fa
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py
@@ -0,0 +1,227 @@
+import email.message
+import importlib.metadata
+import os
+import pathlib
+import zipfile
+from typing import (
+    Collection,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Optional,
+    Sequence,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
+from pip._internal.metadata.base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
+
+from ._compat import BasePath, get_dist_name
+
+
+class WheelDistribution(importlib.metadata.Distribution):
+    """An ``importlib.metadata.Distribution`` read from a wheel.
+
+    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
+    its implementation is too "lazy" for pip's needs (we can't keep the
+    ZipFile handle open for the entire lifetime of the distribution object).
+
+    This implementation eagerly reads the entire metadata directory into
+    memory instead, and operates from that.
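+
+    A sketch of the intended usage, with ``pkg-1.0-py3-none-any.whl`` as a
+    hypothetical wheel file name::
+
+        with zipfile.ZipFile("pkg-1.0-py3-none-any.whl") as zf:
+            dist = WheelDistribution.from_zipfile(
+                zf, "pkg-1.0-py3-none-any.whl", "/tmp/pkg"
+            )
+        metadata_text = dist.read_text("METADATA")  # works after zf closes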
+ """ + + def __init__( + self, + files: Mapping[pathlib.PurePosixPath, bytes], + info_location: pathlib.PurePosixPath, + ) -> None: + self._files = files + self.info_location = info_location + + @classmethod + def from_zipfile( + cls, + zf: zipfile.ZipFile, + name: str, + location: str, + ) -> "WheelDistribution": + info_dir, _ = parse_wheel(zf, name) + paths = ( + (name, pathlib.PurePosixPath(name.split("/", 1)[-1])) + for name in zf.namelist() + if name.startswith(f"{info_dir}/") + ) + files = { + relpath: read_wheel_metadata_file(zf, fullpath) + for fullpath, relpath in paths + } + info_location = pathlib.PurePosixPath(location, info_dir) + return cls(files, info_location) + + def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: + # Only allow iterating through the metadata directory. + if pathlib.PurePosixPath(str(path)) in self._files: + return iter(self._files) + raise FileNotFoundError(path) + + def read_text(self, filename: str) -> Optional[str]: + try: + data = self._files[pathlib.PurePosixPath(filename)] + except KeyError: + return None + try: + text = data.decode("utf-8") + except UnicodeDecodeError as e: + wheel = self.info_location.parent + error = f"Error decoding metadata for {wheel}: {e} in {filename} file" + raise UnsupportedWheel(error) + return text + + +class Distribution(BaseDistribution): + def __init__( + self, + dist: importlib.metadata.Distribution, + info_location: Optional[BasePath], + installed_location: Optional[BasePath], + ) -> None: + self._dist = dist + self._info_location = info_location + self._installed_location = installed_location + + @classmethod + def from_directory(cls, directory: str) -> BaseDistribution: + info_location = pathlib.Path(directory) + dist = importlib.metadata.Distribution.at(info_location) + return cls(dist, info_location, info_location.parent) + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + # Generate temp dir to contain the metadata file, and write the file contents. + temp_dir = pathlib.Path( + TempDirectory(kind="metadata", globally_managed=True).path + ) + metadata_path = temp_dir / "METADATA" + metadata_path.write_bytes(metadata_contents) + # Construct dist pointing to the newly created directory. + dist = importlib.metadata.Distribution.at(metadata_path.parent) + return cls(dist, metadata_path.parent, None) + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: + try: + with wheel.as_zipfile() as zf: + dist = WheelDistribution.from_zipfile(zf, name, wheel.location) + except zipfile.BadZipFile as e: + raise InvalidWheel(wheel.location, name) from e + except UnsupportedWheel as e: + raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") + return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location)) + + @property + def location(self) -> Optional[str]: + if self._info_location is None: + return None + return str(self._info_location.parent) + + @property + def info_location(self) -> Optional[str]: + if self._info_location is None: + return None + return str(self._info_location) + + @property + def installed_location(self) -> Optional[str]: + if self._installed_location is None: + return None + return normalize_path(str(self._installed_location)) + + def _get_dist_name_from_location(self) -> Optional[str]: + """Try to get the name from the metadata directory name. + + This is much faster than reading metadata. 
+ """ + if self._info_location is None: + return None + stem, suffix = os.path.splitext(self._info_location.name) + if suffix not in (".dist-info", ".egg-info"): + return None + return stem.split("-", 1)[0] + + @property + def canonical_name(self) -> NormalizedName: + name = self._get_dist_name_from_location() or get_dist_name(self._dist) + return canonicalize_name(name) + + @property + def version(self) -> DistributionVersion: + return parse_version(self._dist.version) + + def is_file(self, path: InfoPath) -> bool: + return self._dist.read_text(str(path)) is not None + + def iter_distutils_script_names(self) -> Iterator[str]: + # A distutils installation is always "flat" (not in e.g. egg form), so + # if this distribution's info location is NOT a pathlib.Path (but e.g. + # zipfile.Path), it can never contain any distutils scripts. + if not isinstance(self._info_location, pathlib.Path): + return + for child in self._info_location.joinpath("scripts").iterdir(): + yield child.name + + def read_text(self, path: InfoPath) -> str: + content = self._dist.read_text(str(path)) + if content is None: + raise FileNotFoundError(path) + return content + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + # importlib.metadata's EntryPoint structure sasitfies BaseEntryPoint. + return self._dist.entry_points + + def _metadata_impl(self) -> email.message.Message: + # From Python 3.10+, importlib.metadata declares PackageMetadata as the + # return type. This protocol is unfortunately a disaster now and misses + # a ton of fields that we need, including get() and get_payload(). We + # rely on the implementation that the object is actually a Message now, + # until upstream can improve the protocol. (python/cpython#94952) + return cast(email.message.Message, self._dist.metadata) + + def iter_provided_extras(self) -> Iterable[str]: + return self.metadata.get_all("Provides-Extra", []) + + def is_extra_provided(self, extra: str) -> bool: + return any( + canonicalize_name(provided_extra) == canonicalize_name(extra) + for provided_extra in self.metadata.get_all("Provides-Extra", []) + ) + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras] + for req_string in self.metadata.get_all("Requires-Dist", []): + req = Requirement(req_string) + if not req.marker: + yield req + elif not extras and req.marker.evaluate({"extra": ""}): + yield req + elif any(req.marker.evaluate(context) for context in contexts): + yield req diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py new file mode 100644 index 0000000..048dc55 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py @@ -0,0 +1,189 @@ +import functools +import importlib.metadata +import logging +import os +import pathlib +import sys +import zipfile +import zipimport +from typing import Iterator, List, Optional, Sequence, Set, Tuple + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.metadata.base import BaseDistribution, BaseEnvironment +from pip._internal.models.wheel import Wheel +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import WHEEL_EXTENSION + +from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location +from ._dists import Distribution + +logger = logging.getLogger(__name__) + + +def 
_looks_like_wheel(location: str) -> bool: + if not location.endswith(WHEEL_EXTENSION): + return False + if not os.path.isfile(location): + return False + if not Wheel.wheel_file_re.match(os.path.basename(location)): + return False + return zipfile.is_zipfile(location) + + +class _DistributionFinder: + """Finder to locate distributions. + + The main purpose of this class is to memoize found distributions' names, so + only one distribution is returned for each package name. At lot of pip code + assumes this (because it is setuptools's behavior), and not doing the same + can potentially cause a distribution in lower precedence path to override a + higher precedence one if the caller is not careful. + + Eventually we probably want to make it possible to see lower precedence + installations as well. It's useful feature, after all. + """ + + FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]] + + def __init__(self) -> None: + self._found_names: Set[NormalizedName] = set() + + def _find_impl(self, location: str) -> Iterator[FoundResult]: + """Find distributions in a location.""" + # Skip looking inside a wheel. Since a package inside a wheel is not + # always valid (due to .data directories etc.), its .dist-info entry + # should not be considered an installed distribution. + if _looks_like_wheel(location): + return + # To know exactly where we find a distribution, we have to feed in the + # paths one by one, instead of dumping the list to importlib.metadata. + for dist in importlib.metadata.distributions(path=[location]): + info_location = get_info_location(dist) + try: + raw_name = get_dist_name(dist) + except BadMetadata as e: + logger.warning("Skipping %s due to %s", info_location, e.reason) + continue + normalized_name = canonicalize_name(raw_name) + if normalized_name in self._found_names: + continue + self._found_names.add(normalized_name) + yield dist, info_location + + def find(self, location: str) -> Iterator[BaseDistribution]: + """Find distributions in a location. + + The path can be either a directory, or a ZIP archive. + """ + for dist, info_location in self._find_impl(location): + if info_location is None: + installed_location: Optional[BasePath] = None + else: + installed_location = info_location.parent + yield Distribution(dist, info_location, installed_location) + + def find_linked(self, location: str) -> Iterator[BaseDistribution]: + """Read location in egg-link files and return distributions in there. + + The path should be a directory; otherwise this returns nothing. This + follows how setuptools does this for compatibility. The first non-empty + line in the egg-link is read as a path (resolved against the egg-link's + containing directory if relative). Distributions found at that linked + location are returned. 
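+
+        For example, a ``proj.egg-link`` file whose first non-empty line is
+        ``/home/user/src/proj`` (an illustrative path) yields the
+        distributions found in that source directory.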
+ """ + path = pathlib.Path(location) + if not path.is_dir(): + return + for child in path.iterdir(): + if child.suffix != ".egg-link": + continue + with child.open() as f: + lines = (line.strip() for line in f) + target_rel = next((line for line in lines if line), "") + if not target_rel: + continue + target_location = str(path.joinpath(target_rel)) + for dist, info_location in self._find_impl(target_location): + yield Distribution(dist, info_location, path) + + def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]: + from pip._vendor.pkg_resources import find_distributions + + from pip._internal.metadata import pkg_resources as legacy + + with os.scandir(location) as it: + for entry in it: + if not entry.name.endswith(".egg"): + continue + for dist in find_distributions(entry.path): + yield legacy.Distribution(dist) + + def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]: + from pip._vendor.pkg_resources import find_eggs_in_zip + + from pip._internal.metadata import pkg_resources as legacy + + try: + importer = zipimport.zipimporter(location) + except zipimport.ZipImportError: + return + for dist in find_eggs_in_zip(importer, location): + yield legacy.Distribution(dist) + + def find_eggs(self, location: str) -> Iterator[BaseDistribution]: + """Find eggs in a location. + + This actually uses the old *pkg_resources* backend. We likely want to + deprecate this so we can eventually remove the *pkg_resources* + dependency entirely. Before that, this should first emit a deprecation + warning for some versions when using the fallback since importing + *pkg_resources* is slow for those who don't need it. + """ + if os.path.isdir(location): + yield from self._find_eggs_in_dir(location) + if zipfile.is_zipfile(location): + yield from self._find_eggs_in_zip(location) + + +@functools.lru_cache(maxsize=None) # Warn a distribution exactly once. +def _emit_egg_deprecation(location: Optional[str]) -> None: + deprecated( + reason=f"Loading egg at {location} is deprecated.", + replacement="to use pip for package installation.", + gone_in="24.3", + issue=12330, + ) + + +class Environment(BaseEnvironment): + def __init__(self, paths: Sequence[str]) -> None: + self._paths = paths + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(sys.path) + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: + if paths is None: + return cls(sys.path) + return cls(paths) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + finder = _DistributionFinder() + for location in self._paths: + yield from finder.find(location) + for dist in finder.find_eggs(location): + _emit_egg_deprecation(dist.location) + yield dist + # This must go last because that's how pkg_resources tie-breaks. 
+ yield from finder.find_linked(location) + + def get_distribution(self, name: str) -> Optional[BaseDistribution]: + matches = ( + distribution + for distribution in self.iter_all_distributions() + if distribution.canonical_name == canonicalize_name(name) + ) + return next(matches, None) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py new file mode 100644 index 0000000..bb11e5b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py @@ -0,0 +1,278 @@ +import email.message +import email.parser +import logging +import os +import zipfile +from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional + +from pip._vendor import pkg_resources +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file + +from .base import ( + BaseDistribution, + BaseEntryPoint, + BaseEnvironment, + DistributionVersion, + InfoPath, + Wheel, +) + +__all__ = ["NAME", "Distribution", "Environment"] + +logger = logging.getLogger(__name__) + +NAME = "pkg_resources" + + +class EntryPoint(NamedTuple): + name: str + value: str + group: str + + +class InMemoryMetadata: + """IMetadataProvider that reads metadata files from a dictionary. + + This also maps metadata decoding exceptions to our internal exception type. + """ + + def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None: + self._metadata = metadata + self._wheel_name = wheel_name + + def has_metadata(self, name: str) -> bool: + return name in self._metadata + + def get_metadata(self, name: str) -> str: + try: + return self._metadata[name].decode() + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + f"Error decoding metadata for {self._wheel_name}: {e} in {name} file" + ) + + def get_metadata_lines(self, name: str) -> Iterable[str]: + return pkg_resources.yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name: str) -> bool: + return False + + def metadata_listdir(self, name: str) -> List[str]: + return [] + + def run_script(self, script_name: str, namespace: str) -> None: + pass + + +class Distribution(BaseDistribution): + def __init__(self, dist: pkg_resources.Distribution) -> None: + self._dist = dist + + @classmethod + def from_directory(cls, directory: str) -> BaseDistribution: + dist_dir = directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. 
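+        # For example, "Foo-1.0.dist-info" gives dist_name "Foo", while a
+        # (hypothetical) "bar.egg-info" gives dist_name "bar".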
+ if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata) + return cls(dist) + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + metadata_dict = { + "METADATA": metadata_contents, + } + dist = pkg_resources.DistInfoDistribution( + location=filename, + metadata=InMemoryMetadata(metadata_dict, filename), + project_name=project_name, + ) + return cls(dist) + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: + try: + with wheel.as_zipfile() as zf: + info_dir, _ = parse_wheel(zf, name) + metadata_dict = { + path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path) + for path in zf.namelist() + if path.startswith(f"{info_dir}/") + } + except zipfile.BadZipFile as e: + raise InvalidWheel(wheel.location, name) from e + except UnsupportedWheel as e: + raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") + dist = pkg_resources.DistInfoDistribution( + location=wheel.location, + metadata=InMemoryMetadata(metadata_dict, wheel.location), + project_name=name, + ) + return cls(dist) + + @property + def location(self) -> Optional[str]: + return self._dist.location + + @property + def installed_location(self) -> Optional[str]: + egg_link = egg_link_path_from_location(self.raw_name) + if egg_link: + location = egg_link + elif self.location: + location = self.location + else: + return None + return normalize_path(location) + + @property + def info_location(self) -> Optional[str]: + return self._dist.egg_info + + @property + def installed_by_distutils(self) -> bool: + # A distutils-installed distribution is provided by FileMetadata. This + # provider has a "path" attribute not present anywhere else. Not the + # best introspection logic, but pip has been doing this for a long time. + try: + return bool(self._dist._provider.path) + except AttributeError: + return False + + @property + def canonical_name(self) -> NormalizedName: + return canonicalize_name(self._dist.project_name) + + @property + def version(self) -> DistributionVersion: + return parse_version(self._dist.version) + + def is_file(self, path: InfoPath) -> bool: + return self._dist.has_metadata(str(path)) + + def iter_distutils_script_names(self) -> Iterator[str]: + yield from self._dist.metadata_listdir("scripts") + + def read_text(self, path: InfoPath) -> str: + name = str(path) + if not self._dist.has_metadata(name): + raise FileNotFoundError(name) + content = self._dist.get_metadata(name) + if content is None: + raise NoneMetadataError(self, name) + return content + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + for group, entries in self._dist.get_entry_map().items(): + for name, entry_point in entries.items(): + name, _, value = str(entry_point).partition("=") + yield EntryPoint(name=name.strip(), value=value.strip(), group=group) + + def _metadata_impl(self) -> email.message.Message: + """ + :raises NoneMetadataError: if the distribution reports `has_metadata()` + True but `get_metadata()` returns None. 
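+
+        (Reads ``METADATA`` for dist-info distributions and ``PKG-INFO``
+        otherwise, as implemented below.)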
+ """ + if isinstance(self._dist, pkg_resources.DistInfoDistribution): + metadata_name = "METADATA" + else: + metadata_name = "PKG-INFO" + try: + metadata = self.read_text(metadata_name) + except FileNotFoundError: + if self.location: + displaying_path = display_path(self.location) + else: + displaying_path = repr(self.location) + logger.warning("No metadata found in %s", displaying_path) + metadata = "" + feed_parser = email.parser.FeedParser() + feed_parser.feed(metadata) + return feed_parser.close() + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + if extras: # pkg_resources raises on invalid extras, so we sanitize. + extras = frozenset(pkg_resources.safe_extra(e) for e in extras) + extras = extras.intersection(self._dist.extras) + return self._dist.requires(extras) + + def iter_provided_extras(self) -> Iterable[str]: + return self._dist.extras + + def is_extra_provided(self, extra: str) -> bool: + return pkg_resources.safe_extra(extra) in self._dist.extras + + +class Environment(BaseEnvironment): + def __init__(self, ws: pkg_resources.WorkingSet) -> None: + self._ws = ws + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(pkg_resources.working_set) + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: + return cls(pkg_resources.WorkingSet(paths)) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + for dist in self._ws: + yield Distribution(dist) + + def _search_distribution(self, name: str) -> Optional[BaseDistribution]: + """Find a distribution matching the ``name`` in the environment. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + """ + canonical_name = canonicalize_name(name) + for dist in self.iter_all_distributions(): + if dist.canonical_name == canonical_name: + return dist + return None + + def get_distribution(self, name: str) -> Optional[BaseDistribution]: + # Search the distribution by looking through the working set. + dist = self._search_distribution(name) + if dist: + return dist + + # If distribution could not be found, call working_set.require to + # update the working set, and try to find the distribution again. + # This might happen for e.g. when you install a package twice, once + # using setup.py develop and again using setup.py install. Now when + # running pip uninstall twice, the package gets removed from the + # working set in the first uninstall, so we have to populate the + # working set again so that pip knows about it and the packages gets + # picked up and is successfully uninstalled the second time too. + try: + # We didn't pass in any version specifiers, so this can never + # raise pkg_resources.VersionConflict. + self._ws.require(name) + except pkg_resources.DistributionNotFound: + return None + return self._search_distribution(name) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/__init__.py new file mode 100644 index 0000000..7855226 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/__init__.py @@ -0,0 +1,2 @@ +"""A package that contains models that represent entities. 
+""" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6808b37 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc new file mode 100644 index 0000000..3e62728 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc new file mode 100644 index 0000000..ccfa31d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc new file mode 100644 index 0000000..39db30b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc new file mode 100644 index 0000000..e84324d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc new file mode 100644 index 0000000..fbc7deb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc new file mode 100644 index 0000000..de996fe Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc new file mode 100644 index 0000000..2f99927 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc new file mode 100644 index 0000000..96fc9f6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc
new file mode 100644
index 0000000..27dfe0f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc
new file mode 100644
index 0000000..df98c17
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc
new file mode 100644
index 0000000..075fca1
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/candidate.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 0000000..9184a90
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,30 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.models.link import Link
+from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+    """Represents a potential "candidate" for installation."""
+
+    __slots__ = ["name", "version", "link"]
+
+    def __init__(self, name: str, version: str, link: Link) -> None:
+        self.name = name
+        self.version = parse_version(version)
+        self.link = link
+
+        super().__init__(
+            key=(self.name, self.version, self.link),
+            defining_class=InstallationCandidate,
+        )
+
+    def __repr__(self) -> str:
+        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+            self.name,
+            self.version,
+            self.link,
+        )
+
+    def __str__(self) -> str:
+        return f"{self.name!r} candidate (version {self.version} at {self.link})"
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 0000000..0af884b
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,235 @@
+""" PEP 610 """
+import json
+import re
+import urllib.parse
+from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+    "DirectUrl",
+    "DirectUrlValidationError",
+    "DirInfo",
+    "ArchiveInfo",
+    "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+    pass
+
+
+def _get(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
+    """Get value from dictionary and verify expected type."""
+    if key not in d:
+        return default
+    value = d[key]
+    if not isinstance(value, expected_type):
+        raise DirectUrlValidationError(
+            f"{value!r} has unexpected type for {key} (expected {expected_type})"
+        )
+    return value
+
+
+def _get_required(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
+    value = _get(d, expected_type, key, default)
+    if value is None:
+        raise DirectUrlValidationError(f"{key} must have a value")
+    return value
+
+
+def
_exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs: Any) -> Dict[str, Any]: + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo: + name = "vcs_info" + + def __init__( + self, + vcs: str, + commit_id: str, + requested_revision: Optional[str] = None, + ) -> None: + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + ) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + ) + + +class ArchiveInfo: + name = "archive_info" + + def __init__( + self, + hash: Optional[str] = None, + hashes: Optional[Dict[str, str]] = None, + ) -> None: + # set hashes before hash, since the hash setter will further populate hashes + self.hashes = hashes + self.hash = hash + + @property + def hash(self) -> Optional[str]: + return self._hash + + @hash.setter + def hash(self, value: Optional[str]) -> None: + if value is not None: + # Auto-populate the hashes key to upgrade to the new format automatically. + # We don't back-populate the legacy hash key from hashes. 
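+            # For example, a legacy value of "sha256=abc123" (a hypothetical
+            # digest) results in hashes == {"sha256": "abc123"}.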
+ try: + hash_name, hash_value = value.split("=", 1) + except ValueError: + raise DirectUrlValidationError( + f"invalid archive_info.hash format: {value!r}" + ) + if self.hashes is None: + self.hashes = {hash_name: hash_value} + elif hash_name not in self.hashes: + self.hashes = self.hashes.copy() + self.hashes[hash_name] = hash_value + self._hash = value + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: + if d is None: + return None + return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes")) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(hash=self.hash, hashes=self.hashes) + + +class DirInfo: + name = "dir_info" + + def __init__( + self, + editable: bool = False, + ) -> None: + self.editable = editable + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: + if d is None: + return None + return cls(editable=_get_required(d, bool, "editable", default=False)) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(editable=self.editable or None) + + +InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl: + def __init__( + self, + url: str, + info: InfoType, + subdirectory: Optional[str] = None, + ) -> None: + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc: str) -> str: + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) + and self.info.vcs == "git" + and user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self) -> str: + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. 
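+
+        For example (illustrative credentials)::
+
+            https://user:secret@example.com/simple/
+
+        is redacted to ``https://example.com/simple/``.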
+ """ + purl = urllib.parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib.parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self) -> None: + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl": + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self) -> Dict[str, Any]: + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s: str) -> "DirectUrl": + return cls.from_dict(json.loads(s)) + + def to_json(self) -> str: + return json.dumps(self.to_dict(), sort_keys=True) + + def is_local_editable(self) -> bool: + return isinstance(self.info, DirInfo) and self.info.editable diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/format_control.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 0000000..ccd1127 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,78 @@ +from typing import FrozenSet, Optional, Set + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError + + +class FormatControl: + """Helper for managing formats from which a package can be installed.""" + + __slots__ = ["no_binary", "only_binary"] + + def __init__( + self, + no_binary: Optional[Set[str]] = None, + only_binary: Optional[Set[str]] = None, + ) -> None: + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})" + + @staticmethod + def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: + if value.startswith("-"): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." 
+            )
+        new = value.split(",")
+        while ":all:" in new:
+            other.clear()
+            target.clear()
+            target.add(":all:")
+            del new[: new.index(":all:") + 1]
+            # Without a ":none:", we want to discard everything, as ":all:"
+            # covers it.
+            if ":none:" not in new:
+                return
+        for name in new:
+            if name == ":none:":
+                target.clear()
+                continue
+            name = canonicalize_name(name)
+            other.discard(name)
+            target.add(name)
+
+    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+        result = {"binary", "source"}
+        if canonical_name in self.only_binary:
+            result.discard("source")
+        elif canonical_name in self.no_binary:
+            result.discard("binary")
+        elif ":all:" in self.only_binary:
+            result.discard("source")
+        elif ":all:" in self.no_binary:
+            result.discard("binary")
+        return frozenset(result)
+
+    def disallow_binaries(self) -> None:
+        self.handle_mutual_excludes(
+            ":all:",
+            self.no_binary,
+            self.only_binary,
+        )
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/index.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/index.py
new file mode 100644
index 0000000..b94c325
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,28 @@
+import urllib.parse
+
+
+class PackageIndex:
+    """Represents a Package Index and provides easier access to endpoints"""
+
+    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+    def __init__(self, url: str, file_storage_domain: str) -> None:
+        super().__init__()
+        self.url = url
+        self.netloc = urllib.parse.urlsplit(url).netloc
+        self.simple_url = self._url_for_path("simple")
+        self.pypi_url = self._url_for_path("pypi")
+
+        # This is part of a temporary hack used to block installs of PyPI
+        # packages which depend on external URLs; it is only necessary until
+        # PyPI can block such packages itself.
+        self.file_storage_domain = file_storage_domain
+
+    def _url_for_path(self, path: str) -> str:
+        return urllib.parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+TestPyPI = PackageIndex(
+    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+)
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py
new file mode 100644
index 0000000..b9c6330
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py
@@ -0,0 +1,56 @@
+from typing import Any, Dict, Sequence
+
+from pip._vendor.packaging.markers import default_environment
+
+from pip import __version__
+from pip._internal.req.req_install import InstallRequirement
+
+
+class InstallationReport:
+    def __init__(self, install_requirements: Sequence[InstallRequirement]):
+        self._install_requirements = install_requirements
+
+    @classmethod
+    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
+        assert ireq.download_info, f"No download_info for {ireq}"
+        res = {
+            # PEP 610 json for the download URL. download_info.archive_info.hashes
+            # may be absent when the requirement was installed from the wheel
+            # cache and the cache entry was populated by an older pip version
+            # that did not record origin.json.
+            "download_info": ireq.download_info.to_dict(),
+            # is_direct is true if the requirement was a direct URL reference
+            # (which includes editable requirements), and false if the
+            # requirement was downloaded from a PEP 503 index or --find-links.
+ "is_direct": ireq.is_direct, + # is_yanked is true if the requirement was yanked from the index, but + # was still selected by pip to conform to PEP 592. + "is_yanked": ireq.link.is_yanked if ireq.link else False, + # requested is true if the requirement was specified by the user (aka + # top level requirement), and false if it was installed as a dependency of a + # requirement. https://peps.python.org/pep-0376/#requested + "requested": ireq.user_supplied, + # PEP 566 json encoding for metadata + # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata + "metadata": ireq.get_dist().metadata_dict, + } + if ireq.user_supplied and ireq.extras: + # For top level requirements, the list of requested extras, if any. + res["requested_extras"] = sorted(ireq.extras) + return res + + def to_dict(self) -> Dict[str, Any]: + return { + "version": "1", + "pip_version": __version__, + "install": [ + self._install_req_to_dict(ireq) for ireq in self._install_requirements + ], + # https://peps.python.org/pep-0508/#environment-markers + # TODO: currently, the resolver uses the default environment to evaluate + # environment markers, so that is what we report here. In the future, it + # should also take into account options such as --python-version or + # --platform, perhaps under the form of an environment_override field? + # https://github.com/pypa/pip/issues/11198 + "environment": default_environment(), + } diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/link.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/link.py new file mode 100644 index 0000000..73041b8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/link.py @@ -0,0 +1,579 @@ +import functools +import itertools +import logging +import os +import posixpath +import re +import urllib.parse +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Mapping, + NamedTuple, + Optional, + Tuple, + Union, +) + +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + pairwise, + redact_auth_from_url, + split_auth_from_netloc, + splitext, +) +from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.urls import path_to_url, url_to_path + +if TYPE_CHECKING: + from pip._internal.index.collector import IndexContent + +logger = logging.getLogger(__name__) + + +# Order matters, earlier hashes have a precedence over later hashes for what +# we will pick to use. +_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5") + + +@dataclass(frozen=True) +class LinkHash: + """Links to content may have embedded hash values. This class parses those. + + `name` must be any member of `_SUPPORTED_HASHES`. + + This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to + be JSON-serializable to conform to PEP 610, this class contains the logic for + parsing a hash name and value for correctness, and then checking whether that hash + conforms to a schema with `.is_hash_allowed()`.""" + + name: str + value: str + + _hash_url_fragment_re = re.compile( + # NB: we do not validate that the second group (.*) is a valid hex + # digest. Instead, we simply keep that string in this class, and then check it + # against Hashes when hash-checking is needed. 
This is easier to debug than + # proactively discarding an invalid hex digest, as we handle incorrect hashes + # and malformed hashes in the same place. + r"[#&]({choices})=([^&]*)".format( + choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES) + ), + ) + + def __post_init__(self) -> None: + assert self.name in _SUPPORTED_HASHES + + @classmethod + @functools.lru_cache(maxsize=None) + def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]: + """Search a string for a checksum algorithm name and encoded output value.""" + match = cls._hash_url_fragment_re.search(url) + if match is None: + return None + name, value = match.groups() + return cls(name=name, value=value) + + def as_dict(self) -> Dict[str, str]: + return {self.name: self.value} + + def as_hashes(self) -> Hashes: + """Return a Hashes instance which checks only for the current hash.""" + return Hashes({self.name: [self.value]}) + + def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: + """ + Return True if the current hash is allowed by `hashes`. + """ + if hashes is None: + return False + return hashes.is_hash_allowed(self.name, hex_digest=self.value) + + +@dataclass(frozen=True) +class MetadataFile: + """Information about a core metadata file associated with a distribution.""" + + hashes: Optional[Dict[str, str]] + + def __post_init__(self) -> None: + if self.hashes is not None: + assert all(name in _SUPPORTED_HASHES for name in self.hashes) + + +def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]: + # Remove any unsupported hash types from the mapping. If this leaves no + # supported hashes, return None + if hashes is None: + return None + hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} + if not hashes: + return None + return hashes + + +def _clean_url_path_part(part: str) -> str: + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part: str) -> str: + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib.request.pathname2url(urllib.request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) + + +def _clean_url_path(path: str, is_local_path: bool) -> str: + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [""])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return "".join(cleaned_parts) + + +def _ensure_quoted_url(url: str) -> str: + """ + Make sure a link is fully quoted. 
+ For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib.parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib.parse.urlunparse(result._replace(path=path)) + + +class Link(KeyBasedCompareMixin): + """Represents a parsed link from a Package Index's simple URL""" + + __slots__ = [ + "_parsed_url", + "_url", + "_hashes", + "comes_from", + "requires_python", + "yanked_reason", + "metadata_file_data", + "cache_link_parsing", + "egg_fragment", + ] + + def __init__( + self, + url: str, + comes_from: Optional[Union[str, "IndexContent"]] = None, + requires_python: Optional[str] = None, + yanked_reason: Optional[str] = None, + metadata_file_data: Optional[MetadataFile] = None, + cache_link_parsing: bool = True, + hashes: Optional[Mapping[str, str]] = None, + ) -> None: + """ + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of IndexContent where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. See + PEP 592 for more information and the specification. + :param metadata_file_data: the metadata attached to the file, or None if + no such metadata is provided. This argument, if not None, indicates + that a separate metadata file exists, and also optionally supplies + hashes for that file. + :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link should be cached. PyPI + URLs should generally have this set to False, for example. + :param hashes: A mapping of hash names to digests to allow us to + determine the validity of a download. + """ + + # The comes_from, requires_python, and metadata_file_data arguments are + # only used by classmethods of this class, and are not used in client + # code directly. + + # url can be a UNC windows share + if url.startswith("\\\\"): + url = path_to_url(url) + + self._parsed_url = urllib.parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. 
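+        # For example (illustrative URL): for a link like
+        #     https://files.example/pkg-1.0.tar.gz#sha256=0123abcd
+        # the fragment stays part of _url; the hash itself is picked up just
+        # below via LinkHash.find_hash_url_fragment().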
+        self._url = url
+
+        link_hash = LinkHash.find_hash_url_fragment(url)
+        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
+        if hashes is None:
+            self._hashes = hashes_from_link
+        else:
+            self._hashes = {**hashes, **hashes_from_link}
+
+        self.comes_from = comes_from
+        self.requires_python = requires_python if requires_python else None
+        self.yanked_reason = yanked_reason
+        self.metadata_file_data = metadata_file_data
+
+        super().__init__(key=url, defining_class=Link)
+
+        self.cache_link_parsing = cache_link_parsing
+        self.egg_fragment = self._egg_fragment()
+
+    @classmethod
+    def from_json(
+        cls,
+        file_data: Dict[str, Any],
+        page_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert a PyPI JSON document from a simple repository page into a Link.
+        """
+        file_url = file_data.get("url")
+        if file_url is None:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
+        pyrequire = file_data.get("requires-python")
+        yanked_reason = file_data.get("yanked")
+        hashes = file_data.get("hashes", {})
+
+        # PEP 714: Indexes must use the name core-metadata, but
+        # clients should support the old name as a fallback for compatibility.
+        metadata_info = file_data.get("core-metadata")
+        if metadata_info is None:
+            metadata_info = file_data.get("dist-info-metadata")
+
+        # The metadata info value may be a boolean, or a dict of hashes.
+        if isinstance(metadata_info, dict):
+            # The file exists, and hashes have been supplied
+            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
+        elif metadata_info:
+            # The file exists, but there are no hashes
+            metadata_file_data = MetadataFile(None)
+        else:
+            # False or not present: the file does not exist
+            metadata_file_data = None
+
+        # The Link.yanked_reason expects an empty string instead of a boolean.
+        if yanked_reason and not isinstance(yanked_reason, str):
+            yanked_reason = ""
+        # The Link.yanked_reason expects None instead of False.
+        elif not yanked_reason:
+            yanked_reason = None
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            hashes=hashes,
+            metadata_file_data=metadata_file_data,
+        )
+
+    @classmethod
+    def from_element(
+        cls,
+        anchor_attribs: Dict[str, Optional[str]],
+        page_url: str,
+        base_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert an anchor element's attributes in a simple repository page to a Link.
+        """
+        href = anchor_attribs.get("href")
+        if not href:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
+        pyrequire = anchor_attribs.get("data-requires-python")
+        yanked_reason = anchor_attribs.get("data-yanked")
+
+        # PEP 714: Indexes must use the name data-core-metadata, but
+        # clients should support the old name as a fallback for compatibility.
+        metadata_info = anchor_attribs.get("data-core-metadata")
+        if metadata_info is None:
+            metadata_info = anchor_attribs.get("data-dist-info-metadata")
+        # The metadata info value may be the string "true", or a string of
+        # the form "hashname=hashval"
+        if metadata_info == "true":
+            # The file exists, but there are no hashes
+            metadata_file_data = MetadataFile(None)
+        elif metadata_info is None:
+            # The file does not exist
+            metadata_file_data = None
+        else:
+            # The file exists, and hashes have been supplied
+            hashname, sep, hashval = metadata_info.partition("=")
+            if sep == "=":
+                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
+            else:
+                # Error - data is wrong. Treat as no hashes supplied.
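+                # For illustration (hypothetical value): a well-formed
+                # attribute such as data-core-metadata="sha256=0123abcd"
+                # parses to MetadataFile({"sha256": "0123abcd"}); a value
+                # without "=" ends up in this branch instead.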
+                logger.debug(
+                    "Index returned invalid data-dist-info-metadata value: %s",
+                    metadata_info,
+                )
+                metadata_file_data = MetadataFile(None)
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            metadata_file_data=metadata_file_data,
+        )
+
+    def __str__(self) -> str:
+        if self.requires_python:
+            rp = f" (requires-python:{self.requires_python})"
+        else:
+            rp = ""
+        if self.comes_from:
+            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
+        else:
+            return redact_auth_from_url(str(self._url))
+
+    def __repr__(self) -> str:
+        return f"<Link {self}>"
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def filename(self) -> str:
+        path = self.path.rstrip("/")
+        name = posixpath.basename(path)
+        if not name:
+            # Make sure we don't leak auth information if the netloc
+            # includes a username and password.
+            netloc, user_pass = split_auth_from_netloc(self.netloc)
+            return netloc
+
+        name = urllib.parse.unquote(name)
+        assert name, f"URL {self._url!r} produced no filename"
+        return name
+
+    @property
+    def file_path(self) -> str:
+        return url_to_path(self.url)
+
+    @property
+    def scheme(self) -> str:
+        return self._parsed_url.scheme
+
+    @property
+    def netloc(self) -> str:
+        """
+        This can contain auth information.
+        """
+        return self._parsed_url.netloc
+
+    @property
+    def path(self) -> str:
+        return urllib.parse.unquote(self._parsed_url.path)
+
+    def splitext(self) -> Tuple[str, str]:
+        return splitext(posixpath.basename(self.path.rstrip("/")))
+
+    @property
+    def ext(self) -> str:
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self) -> str:
+        scheme, netloc, path, query, fragment = self._parsed_url
+        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+    # Per PEP 508.
+    _project_name_re = re.compile(
+        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+    )
+
+    def _egg_fragment(self) -> Optional[str]:
+        match = self._egg_fragment_re.search(self._url)
+        if not match:
+            return None
+
+        # An egg fragment looks like a PEP 508 project name, along with
+        # an optional extras specifier. Anything else is invalid.
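+        # e.g. "#egg=mypkg" yields "mypkg" (hypothetical name), whereas
+        # something like "#egg=mypkg==1.0" fails _project_name_re and hits
+        # the deprecation warning below.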
+        project_name = match.group(1)
+        if not self._project_name_re.match(project_name):
+            deprecated(
+                reason=f"{self} contains an egg fragment with a non-PEP 508 name",
+                replacement="to use the req @ url syntax, and remove the egg fragment",
+                gone_in="25.0",
+                issue=11617,
+            )
+
+        return project_name
+
+    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+    @property
+    def subdirectory_fragment(self) -> Optional[str]:
+        match = self._subdirectory_fragment_re.search(self._url)
+        if not match:
+            return None
+        return match.group(1)
+
+    def metadata_link(self) -> Optional["Link"]:
+        """Return a link to the associated core metadata file (if any)."""
+        if self.metadata_file_data is None:
+            return None
+        metadata_url = f"{self.url_without_fragment}.metadata"
+        if self.metadata_file_data.hashes is None:
+            return Link(metadata_url)
+        return Link(metadata_url, hashes=self.metadata_file_data.hashes)
+
+    def as_hashes(self) -> Hashes:
+        return Hashes({k: [v] for k, v in self._hashes.items()})
+
+    @property
+    def hash(self) -> Optional[str]:
+        return next(iter(self._hashes.values()), None)
+
+    @property
+    def hash_name(self) -> Optional[str]:
+        return next(iter(self._hashes), None)
+
+    @property
+    def show_url(self) -> str:
+        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+    @property
+    def is_file(self) -> bool:
+        return self.scheme == "file"
+
+    def is_existing_dir(self) -> bool:
+        return self.is_file and os.path.isdir(self.file_path)
+
+    @property
+    def is_wheel(self) -> bool:
+        return self.ext == WHEEL_EXTENSION
+
+    @property
+    def is_vcs(self) -> bool:
+        from pip._internal.vcs import vcs
+
+        return self.scheme in vcs.all_schemes
+
+    @property
+    def is_yanked(self) -> bool:
+        return self.yanked_reason is not None
+
+    @property
+    def has_hash(self) -> bool:
+        return bool(self._hashes)
+
+    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+        """
+        Return True if the link has a hash and it is allowed by `hashes`.
+        """
+        if hashes is None:
+            return False
+        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
+
+
+class _CleanResult(NamedTuple):
+    """Convert link for equivalency check.
+
+    This is used in the resolver to check whether two URL-specified requirements
+    likely point to the same distribution and can be considered equivalent. This
+    equivalency logic avoids comparing URLs literally, which can be too strict
+    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts that users do not expect.
+
+    Currently this does three things:
+
+    1. Drop the basic auth part. This is technically wrong since a server can
+       serve different content based on auth, but if it does that, it is
+       impossible even to guarantee that two URLs without auth are equivalent,
+       since the user can input different auth information when prompted. So
+       the practical solution is to assume the auth doesn't affect the response.
+    2. Parse the query to avoid the ordering issue. Note that the ordering of
+       values under the same query key is NOT normalized; i.e. "a=1&a=2" and
+       "a=2&a=1" are still considered different.
+    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+       hash values, since it should have no impact on the downloaded content.
+       Note that this drops the "egg=" part historically used to denote the
+       requested project (and extras), which is wrong in the strictest sense,
+       but enough people supply it inconsistently to cause superfluous
+       resolution conflicts, so we choose to ignore it as well.
+ """ + + parsed: urllib.parse.SplitResult + query: Dict[str, List[str]] + subdirectory: str + hashes: Dict[str, str] + + +def _clean_link(link: Link) -> _CleanResult: + parsed = link._parsed_url + netloc = parsed.netloc.rsplit("@", 1)[-1] + # According to RFC 8089, an empty host in file: means localhost. + if parsed.scheme == "file" and not netloc: + netloc = "localhost" + fragment = urllib.parse.parse_qs(parsed.fragment) + if "egg" in fragment: + logger.debug("Ignoring egg= fragment in %s", link) + try: + # If there are multiple subdirectory values, use the first one. + # This matches the behavior of Link.subdirectory_fragment. + subdirectory = fragment["subdirectory"][0] + except (IndexError, KeyError): + subdirectory = "" + # If there are multiple hash values under the same algorithm, use the + # first one. This matches the behavior of Link.hash_value. + hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment} + return _CleanResult( + parsed=parsed._replace(netloc=netloc, query="", fragment=""), + query=urllib.parse.parse_qs(parsed.query), + subdirectory=subdirectory, + hashes=hashes, + ) + + +@functools.lru_cache(maxsize=None) +def links_equivalent(link1: Link, link2: Link) -> bool: + return _clean_link(link1) == _clean_link(link2) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/scheme.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 0000000..f51190a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] + + +class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib: str, + purelib: str, + headers: str, + scripts: str, + data: str, + ) -> None: + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 0000000..fe61e81 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,132 @@ +import itertools +import logging +import os +import posixpath +import urllib.parse +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url + +logger = logging.getLogger(__name__) + + +class SearchScope: + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls", "no_index"] + + @classmethod + def create( + cls, + find_links: List[str], + index_urls: List[str], + no_index: bool, + ) -> "SearchScope": + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. 
+ # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links: List[str] = [] + for link in find_links: + if link.startswith("~"): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib.parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + no_index=no_index, + ) + + def __init__( + self, + find_links: List[str], + index_urls: List[str], + no_index: bool, + ) -> None: + self.find_links = find_links + self.index_urls = index_urls + self.no_index = no_index + + def get_formatted_locations(self) -> str: + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib.parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, please provide a scheme.', + redacted_index_url, + ) + + redacted_index_urls.append(redacted_index_url) + + lines.append( + "Looking in indexes: {}".format(", ".join(redacted_index_urls)) + ) + + if self.find_links: + lines.append( + "Looking in links: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.find_links) + ) + ) + return "\n".join(lines) + + def get_index_urls_locations(self, project_name: str) -> List[str]: + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url: str) -> str: + loc = posixpath.join( + url, urllib.parse.quote(canonicalize_name(project_name)) + ) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith("/"): + loc = loc + "/" + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 0000000..977bc4c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,51 @@ +from typing import Optional + +from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences: + """ + Encapsulates the candidate selection preferences for downloading + and installing files. 
+ """ + + __slots__ = [ + "allow_yanked", + "allow_all_prereleases", + "format_control", + "prefer_binary", + "ignore_requires_python", + ] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked: bool, + allow_all_prereleases: bool = False, + format_control: Optional[FormatControl] = None, + prefer_binary: bool = False, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/target_python.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 0000000..67ea5da --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,122 @@ +import sys +from typing import List, Optional, Set, Tuple + +from pip._vendor.packaging.tags import Tag + +from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info + + +class TargetPython: + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abis", + "implementation", + "platforms", + "py_version", + "py_version_info", + "_valid_tags", + "_valid_tags_set", + ] + + def __init__( + self, + platforms: Optional[List[str]] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + abis: Optional[List[str]] = None, + implementation: Optional[str] = None, + ) -> None: + """ + :param platforms: A list of strings or None. If None, searches for + packages that are supported by the current system. Otherwise, will + find packages that can be built on the platforms passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abis: A list of strings or None. This is passed to + compatibility_tags.py's get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). 
+        self._given_py_version_info = py_version_info
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        py_version = ".".join(map(str, py_version_info[:2]))
+
+        self.abis = abis
+        self.implementation = implementation
+        self.platforms = platforms
+        self.py_version = py_version
+        self.py_version_info = py_version_info
+
+        # This is used to cache the return value of get_(un)sorted_tags.
+        self._valid_tags: Optional[List[Tag]] = None
+        self._valid_tags_set: Optional[Set[Tag]] = None
+
+    def format_given(self) -> str:
+        """
+        Format the given, non-None attributes for display.
+        """
+        display_version = None
+        if self._given_py_version_info is not None:
+            display_version = ".".join(
+                str(part) for part in self._given_py_version_info
+            )
+
+        key_values = [
+            ("platforms", self.platforms),
+            ("version_info", display_version),
+            ("abis", self.abis),
+            ("implementation", self.implementation),
+        ]
+        return " ".join(
+            f"{key}={value!r}" for key, value in key_values if value is not None
+        )
+
+    def get_sorted_tags(self) -> List[Tag]:
+        """
+        Return the supported PEP 425 tags to check wheel candidates against.
+
+        The tags are returned in order of preference (most preferred first).
+        """
+        if self._valid_tags is None:
+            # Pass versions=None if no py_version_info was given since
+            # versions=None uses special default logic.
+            py_version_info = self._given_py_version_info
+            if py_version_info is None:
+                version = None
+            else:
+                version = version_info_to_nodot(py_version_info)
+
+            tags = get_supported(
+                version=version,
+                platforms=self.platforms,
+                abis=self.abis,
+                impl=self.implementation,
+            )
+            self._valid_tags = tags
+
+        return self._valid_tags
+
+    def get_unsorted_tags(self) -> Set[Tag]:
+        """Exactly the same as get_sorted_tags, but returns a set.
+
+        This is important for performance.
+        """
+        if self._valid_tags_set is None:
+            self._valid_tags_set = set(self.get_sorted_tags())
+
+        return self._valid_tags_set
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/models/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/models/wheel.py
new file mode 100644
index 0000000..a5dc12b
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/models/wheel.py
@@ -0,0 +1,92 @@
+"""Represents a wheel file and provides access to the various parts of the
+name that have meaning.
+"""
+import re
+from typing import Dict, Iterable, List
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.exceptions import InvalidWheelFilename
+
+
+class Wheel:
+    """A wheel file"""
+
+    wheel_file_re = re.compile(
+        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
+ \.whl|\.dist-info)$""", + re.VERBOSE, + ) + + def __init__(self, filename: str) -> None: + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.") + self.filename = filename + self.name = wheel_info.group("name").replace("_", "-") + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group("ver").replace("_", "-") + self.build_tag = wheel_info.group("build") + self.pyversions = wheel_info.group("pyver").split(".") + self.abis = wheel_info.group("abi").split(".") + self.plats = wheel_info.group("plat").split(".") + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self) -> List[str]: + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags: List[Tag]) -> int: + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + try: + return next(i for i, t in enumerate(tags) if t in self.file_tags) + except StopIteration: + raise ValueError() + + def find_most_preferred_tag( + self, tags: List[Tag], tag_to_priority: Dict[Tag, int] + ) -> int: + """Return the priority of the most preferred tag that one of the wheel's file + tag combinations achieves in the given list of supported tags using the given + tag_to_priority mapping, where lower priorities are more-preferred. + + This is used in place of support_index_min in some cases in order to avoid + an expensive linear scan of a large list of tags. + + :param tags: the PEP 425 tags to check the wheel against. + :param tag_to_priority: a mapping from tag to priority of that tag, where + lower is more preferred. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min( + tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority + ) + + def supported(self, tags: Iterable[Tag]) -> bool: + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. + """ + return not self.file_tags.isdisjoint(tags) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 0000000..b51bde9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. 
+""" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b5307bf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..d718461 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..68b2073 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc new file mode 100644 index 0000000..fe24be3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc new file mode 100644 index 0000000..9a28bcc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc new file mode 100644 index 0000000..779748a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..a5b131d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc new file mode 100644 index 0000000..ed1c853 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/auth.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/auth.py new file mode 100644 index 0000000..94a82fa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,561 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. 
+""" +import logging +import os +import shutil +import subprocess +import sysconfig +import typing +import urllib.parse +from abc import ABC, abstractmethod +from functools import lru_cache +from os.path import commonprefix +from pathlib import Path +from typing import Any, Dict, List, NamedTuple, Optional, Tuple + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Request, Response +from pip._vendor.requests.utils import get_netrc_auth + +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.vcs.versioncontrol import AuthInfo + +logger = getLogger(__name__) + +KEYRING_DISABLED = False + + +class Credentials(NamedTuple): + url: str + username: str + password: str + + +class KeyRingBaseProvider(ABC): + """Keyring base provider interface""" + + has_keyring: bool + + @abstractmethod + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + ... + + @abstractmethod + def save_auth_info(self, url: str, username: str, password: str) -> None: + ... + + +class KeyRingNullProvider(KeyRingBaseProvider): + """Keyring null provider""" + + has_keyring = False + + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + return None + + +class KeyRingPythonProvider(KeyRingBaseProvider): + """Keyring interface which uses locally imported `keyring`""" + + has_keyring = True + + def __init__(self) -> None: + import keyring + + self.keyring = keyring + + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + # Support keyring's get_credential interface which supports getting + # credentials without a username. This is only available for + # keyring>=15.2.0. + if hasattr(self.keyring, "get_credential"): + logger.debug("Getting credentials from keyring for %s", url) + cred = self.keyring.get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username is not None: + logger.debug("Getting password from keyring for %s", url) + password = self.keyring.get_password(url, username) + if password: + return username, password + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + self.keyring.set_password(url, username, password) + + +class KeyRingCliProvider(KeyRingBaseProvider): + """Provider which uses `keyring` cli + + Instead of calling the keyring package installed alongside pip + we call keyring on the command line which will enable pip to + use which ever installation of keyring is available first in + PATH. 
+ """ + + has_keyring = True + + def __init__(self, cmd: str) -> None: + self.keyring = cmd + + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + # This is the default implementation of keyring.get_credential + # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139 + if username is not None: + password = self._get_password(url, username) + if password is not None: + return username, password + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + return self._set_password(url, username, password) + + def _get_password(self, service_name: str, username: str) -> Optional[str]: + """Mirror the implementation of keyring.get_password using cli""" + if self.keyring is None: + return None + + cmd = [self.keyring, "get", service_name, username] + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + res = subprocess.run( + cmd, + stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, + env=env, + ) + if res.returncode: + return None + return res.stdout.decode("utf-8").strip(os.linesep) + + def _set_password(self, service_name: str, username: str, password: str) -> None: + """Mirror the implementation of keyring.set_password using cli""" + if self.keyring is None: + return None + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + subprocess.run( + [self.keyring, "set", service_name, username], + input=f"{password}{os.linesep}".encode("utf-8"), + env=env, + check=True, + ) + return None + + +@lru_cache(maxsize=None) +def get_keyring_provider(provider: str) -> KeyRingBaseProvider: + logger.verbose("Keyring provider requested: %s", provider) + + # keyring has previously failed and been disabled + if KEYRING_DISABLED: + provider = "disabled" + if provider in ["import", "auto"]: + try: + impl = KeyRingPythonProvider() + logger.verbose("Keyring provider set: import") + return impl + except ImportError: + pass + except Exception as exc: + # In the event of an unexpected exception + # we should warn the user + msg = "Installed copy of keyring fails with exception %s" + if provider == "auto": + msg = msg + ", trying to find a keyring executable as a fallback" + logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG)) + if provider in ["subprocess", "auto"]: + cli = shutil.which("keyring") + if cli and cli.startswith(sysconfig.get_path("scripts")): + # all code within this function is stolen from shutil.which implementation + @typing.no_type_check + def PATH_as_shutil_which_determines_it() -> str: + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + return path + + scripts = Path(sysconfig.get_path("scripts")) + + paths = [] + for path in PATH_as_shutil_which_determines_it().split(os.pathsep): + p = Path(path) + try: + if not p.samefile(scripts): + paths.append(path) + except FileNotFoundError: + pass + + path = os.pathsep.join(paths) + + cli = shutil.which("keyring", path=path) + + if cli: + logger.verbose("Keyring provider set: subprocess with executable %s", cli) + return KeyRingCliProvider(cli) + + logger.verbose("Keyring provider set: disabled") + return KeyRingNullProvider() + + +class MultiDomainBasicAuth(AuthBase): + def __init__( + self, + prompting: bool = True, + index_urls: Optional[List[str]] = 
None, + keyring_provider: str = "auto", + ) -> None: + self.prompting = prompting + self.index_urls = index_urls + self.keyring_provider = keyring_provider # type: ignore[assignment] + self.passwords: Dict[str, AuthInfo] = {} + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save: Optional[Credentials] = None + + @property + def keyring_provider(self) -> KeyRingBaseProvider: + return get_keyring_provider(self._keyring_provider) + + @keyring_provider.setter + def keyring_provider(self, provider: str) -> None: + # The free function get_keyring_provider has been decorated with + # functools.cache. If an exception occurs in get_keyring_auth that + # cache will be cleared and keyring disabled, take that into account + # if you want to remove this indirection. + self._keyring_provider = provider + + @property + def use_keyring(self) -> bool: + # We won't use keyring when --no-input is passed unless + # a specific provider is requested because it might require + # user interaction + return self.prompting or self._keyring_provider not in ["auto", "disabled"] + + def _get_keyring_auth( + self, + url: Optional[str], + username: Optional[str], + ) -> Optional[AuthInfo]: + """Return the tuple auth for a given url from keyring.""" + # Do nothing if no url was provided + if not url: + return None + + try: + return self.keyring_provider.get_auth_info(url, username) + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + global KEYRING_DISABLED + KEYRING_DISABLED = True + get_keyring_provider.cache_clear() + return None + + def _get_index_url(self, url: str) -> Optional[str]: + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + url = remove_auth_from_url(url).rstrip("/") + "/" + parsed_url = urllib.parse.urlsplit(url) + + candidates = [] + + for index in self.index_urls: + index = index.rstrip("/") + "/" + parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index)) + if parsed_url == parsed_index: + return index + + if parsed_url.netloc != parsed_index.netloc: + continue + + candidate = urllib.parse.urlsplit(index) + candidates.append(candidate) + + if not candidates: + return None + + candidates.sort( + reverse=True, + key=lambda candidate: commonprefix( + [ + parsed_url.path, + candidate.path, + ] + ).rfind("/"), + ) + + return urllib.parse.urlunsplit(candidates[0]) + + def _get_new_credentials( + self, + original_url: str, + *, + allow_netrc: bool = True, + allow_keyring: bool = False, + ) -> AuthInfo: + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. 
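+        # e.g. (illustrative) "https://user:pass@idx.example/simple" splits
+        # into ("https://idx.example/simple", "idx.example", ("user", "pass")).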
+ url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + # fmt: off + kr_auth = ( + self._get_keyring_auth(index_url, username) or + self._get_keyring_auth(netloc, username) + ) + # fmt: on + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials( + self, original_url: str + ) -> Tuple[str, Optional[str], Optional[str]]: + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Try to get credentials from original url + username, password = self._get_new_credentials(original_url) + + # If credentials not found, use any stored credentials for this netloc. + # Do this if either the username or the password is missing. + # This accounts for the situation in which the user has specified + # the username in the index url, but the password comes from keyring. + if (username is None or password is None) and netloc in self.passwords: + un, pw = self.passwords[netloc] + # It is possible that the cached credentials are for a different username, + # in which case the cache should be ignored. + if username is None or username == un: + username, password = un, pw + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
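+            # (Lookup order above: credentials embedded in the URL itself,
+            # then a matching index URL, then netrc; keyring is only consulted
+            # on a 401 via _get_new_credentials(..., allow_keyring=True).)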
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) + # Credentials were not found + or (username is None and password is None) + ), f"Could not load credentials from url: {original_url}" + + return url, username, password + + def __call__(self, req: Request) -> Request: + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password( + self, netloc: str + ) -> Tuple[Optional[str], Optional[str], bool]: + username = ask_input(f"User for {netloc}: ") if self.prompting else None + if not username: + return None, None, False + if self.use_keyring: + auth = self._get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self) -> bool: + if ( + not self.prompting + or not self.use_keyring + or not self.keyring_provider.has_keyring + ): + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp: Response, **kwargs: Any) -> Response: + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + username, password = None, None + + # Query the keyring for credentials: + if self.use_keyring: + username, password = self._get_new_credentials( + resp.url, + allow_netrc=False, + allow_keyring=True, + ) + + # We are not able to prompt the user so simply return the response + if not self.prompting and not username and not password: + return resp + + parsed = urllib.parse.urlparse(resp.url) + + # Prompt the user for a new username and password + save = False + if not username and not password: + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = Credentials( + url=parsed.netloc, + username=username, + password=password, + ) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + # The result of the assignment isn't used, it's just needed to consume + # the content. + _ = resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
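+        # Rough flow of this handler: 401 received -> try keyring -> prompt
+        # the user -> retry the request below with HTTPBasicAuth -> optionally
+        # persist working credentials via save_credentials.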
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp: Response, **kwargs: Any) -> None: + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + "401 Error, Credentials not correct for %s", + resp.request.url, + ) + + def save_credentials(self, resp: Response, **kwargs: Any) -> None: + """Response callback to save credentials on success.""" + assert ( + self.keyring_provider.has_keyring + ), "should never reach here without keyring" + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info("Saving credentials to keyring") + self.keyring_provider.save_auth_info( + creds.url, creds.username, creds.password + ) + except Exception: + logger.exception("Failed to save credentials") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/cache.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/cache.py new file mode 100644 index 0000000..4d0fb54 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,106 @@ +"""HTTP cache implementation. +""" + +import os +from contextlib import contextmanager +from datetime import datetime +from typing import BinaryIO, Generator, Optional, Union + +from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache +from pip._vendor.cachecontrol.caches import SeparateBodyFileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir + + +def is_from_cache(response: Response) -> bool: + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors() -> Generator[None, None, None]: + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except OSError: + pass + + +class SafeFileCache(SeparateBodyBaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + + There is a race condition when two processes try to write and/or read the + same entry at the same time, since each entry consists of two separate + files (https://github.com/psf/cachecontrol/issues/324). We therefore have + additional logic that makes sure that both files to be present before + returning an entry; this fixes the read side of the race condition. + + For the write side, we assume that the server will only ever return the + same data for the same URL, which ought to be the case for files pip is + downloading. PyPI does not have a mechanism to swap out a wheel for + another wheel, for example. If this assumption is not true, the + CacheControl issue will need to be fixed. + """ + + def __init__(self, directory: str) -> None: + assert directory is not None, "Cache directory must not be None." + super().__init__() + self.directory = directory + + def _get_cache_path(self, name: str) -> str: + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
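+        # e.g. (illustrative) a key whose encoded name is "abc123..." is
+        # stored at <directory>/a/b/c/1/2/abc123..., i.e. one subdirectory
+        # per character of the first five characters of the hash.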
+ hashed = SeparateBodyFileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> Optional[bytes]: + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None + with suppressed_cache_errors(): + with open(metadata_path, "rb") as f: + return f.read() + + def _write(self, path: str, data: bytes) -> None: + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(data) + + replace(f.name, path) + + def set( + self, key: str, value: bytes, expires: Union[int, datetime, None] = None + ) -> None: + path = self._get_cache_path(key) + self._write(path, value) + + def delete(self, key: str) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) + with suppressed_cache_errors(): + os.remove(path + ".body") + + def get_body(self, key: str) -> Optional[BinaryIO]: + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None + with suppressed_cache_errors(): + return open(body_path, "rb") + + def set_body(self, key: str, body: bytes) -> None: + path = self._get_cache_path(key) + ".body" + self._write(path, body) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/download.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/download.py new file mode 100644 index 0000000..d1d4354 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/download.py @@ -0,0 +1,186 @@ +"""Download files with progress indicators. 
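# A runnable sketch of the directory sharding done by _get_cache_path above.
# It assumes SeparateBodyFileCache.encode is a SHA-224 hex digest of the key
# (as in cachecontrol's FileCache); the first five hex characters become
# nested directories so no single cache directory grows too large.
import hashlib
import os


def cache_path(directory: str, name: str) -> str:
    hashed = hashlib.sha224(name.encode()).hexdigest()
    parts = list(hashed[:5]) + [hashed]
    return os.path.join(directory, *parts)


# cache_path("/tmp/http-cache", "https://pypi.org/simple/pip/")
# -> "/tmp/http-cache/<h1>/<h2>/<h3>/<h4>/<h5>/<full-digest>"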
+""" +import email.message +import logging +import mimetypes +import os +from typing import Iterable, Optional, Tuple + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.cli.progress_bars import get_download_progress_renderer +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.models.link import Link +from pip._internal.network.cache import is_from_cache +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp: Response) -> Optional[int]: + try: + return int(resp.headers["content-length"]) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp: Response, + link: Link, + progress_bar: str, +) -> Iterable[bytes]: + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = f"{logged_url} ({format_size(total_length)})" + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length) + return renderer(chunks) + + +def sanitize_content_filename(filename: str) -> str: + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition: str, default_filename: str) -> str: + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + m = email.message.Message() + m["content-type"] = content_disposition + filename = m.get_param("filename") + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(str(filename)) + return filename or default_filename + + +def _get_http_response_filename(resp: Response, link: Link) -> str: + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get("content-disposition") + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext: Optional[str] = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(resp.headers.get("content-type", "")) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session: PipSession, link: Link) -> Response: + target_url = link.url.split("#", 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Downloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link: Link, location: str) -> Tuple[str, str]: + """Download the file given by link into location.""" + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + return filepath, content_type + + +class BatchDownloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__( + self, links: Iterable[Link], location: str + ) -> Iterable[Tuple[Link, Tuple[str, str]]]: + """Download the files given by links into location.""" + for link in links: + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", + e.response.status_code, + link, + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + yield link, (filepath, content_type) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 0000000..82ec50d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,210 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, Dict, Generator, List, Optional, Tuple +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, 
response_chunks + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution: + """Return a distribution object from the given wheel URL. + + This uses HTTP range requests to only fetch the portion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as zf: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + wheel = MemoryWheel(zf.name, zf) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return get_wheel_distribution(wheel, canonicalize_name(name)) + + +class LazyZipOverHTTP: + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. + """ + + def __init__( + self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE + ) -> None: + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers["Content-Length"]) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left: List[int] = [] + self._right: List[int] = [] + if "bytes" not in head.headers.get("Accept-Ranges", "none"): + raise HTTPRangeRequestUnsupported("range request is not supported") + self._check_zip() + + @property + def mode(self) -> str: + """Opening mode, which is always rb.""" + return "rb" + + @property + def name(self) -> str: + """Path to the underlying file.""" + return self._file.name + + def seekable(self) -> bool: + """Return whether random access is supported, which is True.""" + return True + + def close(self) -> None: + """Close the file.""" + self._file.close() + + @property + def closed(self) -> bool: + """Whether the file is closed.""" + return self._file.closed + + def read(self, size: int = -1) -> bytes: + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + download_size = max(size, self._chunk_size) + start, length = self.tell(), self._length + stop = length if size < 0 else min(start + download_size, length) + start = max(0, stop - download_size) + self._download(start, stop - 1) + return self._file.read(size) + + def readable(self) -> bool: + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset: int, whence: int = 0) -> int: + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self) -> int: + """Return the current position.""" + return self._file.tell() + + def truncate(self, size: Optional[int] = None) -> int: + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. 
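# A minimal sketch of the HTTP range primitive LazyZipOverHTTP relies on;
# the URL is a hypothetical placeholder. A server that supports ranges
# answers 206 Partial Content with only the requested byte window.
import requests

url = "https://files.example.org/some-package.whl"  # hypothetical
resp = requests.get(
    url, headers={"Range": "bytes=0-99", "Accept-Encoding": "identity"}
)
if resp.status_code == 206:  # Partial Content
    first_hundred_bytes = resp.content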
+ + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self) -> bool: + """Return False.""" + return False + + def __enter__(self) -> "LazyZipOverHTTP": + self._file.__enter__() + return self + + def __exit__(self, *exc: Any) -> None: + self._file.__exit__(*exc) + + @contextmanager + def _stay(self) -> Generator[None, None, None]: + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self) -> None: + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. + ZipFile(self) # type: ignore + except BadZipFile: + pass + else: + break + + def _stream_response( + self, start: int, end: int, base_headers: Dict[str, str] = HEADERS + ) -> Response: + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers["Range"] = f"bytes={start}-{end}" + # TODO: Get range requests to be correctly cached + headers["Cache-Control"] = "no-cache" + return self._session.get(self._url, headers=headers, stream=True) + + def _merge( + self, start: int, end: int, left: int, right: int + ) -> Generator[Tuple[int, int], None, None]: + """Return a generator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start] + lslice[:1]) + end = max([end] + rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j - 1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start: int, end: int) -> None: + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/session.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/session.py new file mode 100644 index 0000000..f17efc5 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/session.py @@ -0,0 +1,520 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. 
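# A self-contained re-implementation of the interval bookkeeping in _merge
# above, to make the bisect arithmetic concrete. With runs 0-9 and 30-39
# already downloaded, a request for bytes 5-34 should fetch only the gap
# 10-29 and then collapse all bookkeeping into a single 0-39 run.
from bisect import bisect_left, bisect_right


def merge(start, end, lefts, rights):
    left = bisect_left(rights, start)
    right = bisect_right(lefts, end)
    lslice, rslice = lefts[left:right], rights[left:right]
    i = start = min([start] + lslice[:1])
    end = max([end] + rslice[-1:])
    for j, k in zip(lslice, rslice):
        if j > i:
            yield i, j - 1  # a gap that still needs downloading
        i = k + 1
    if i <= end:
        yield i, end
    lefts[left:right], rights[left:right] = [start], [end]


lefts, rights = [0, 30], [9, 39]
print(list(merge(5, 34, lefts, rights)))  # [(10, 29)]
print(lefts, rights)                      # [0] [39]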
+""" + +import email.utils +import io +import ipaddress +import json +import logging +import mimetypes +import os +import platform +import shutil +import subprocess +import sys +import urllib.parse +import warnings +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +from pip._vendor import requests, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter +from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter +from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter +from pip._vendor.requests.models import PreparedRequest, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3.connectionpool import ConnectionPool +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.metadata import get_default_environment +from pip._internal.models.link import Link +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache + +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import build_url_from_netloc, parse_netloc +from pip._internal.utils.urls import url_to_path + +if TYPE_CHECKING: + from ssl import SSLContext + + from pip._vendor.urllib3.poolmanager import PoolManager + + +logger = logging.getLogger(__name__) + +SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS: List[SecureOrigin] = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + "BUILD_BUILDID", + # Jenkins + "BUILD_ID", + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + "CI", + # Explicit environment variable. + "PIP_IS_CI", +) + + +def looks_like_ci() -> bool: + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent() -> str: + """ + Return a string representing the user agent. 
+ """ + data: Dict[str, Any] = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == "CPython": + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "PyPy": + pypy_version_info = sys.pypy_version_info # type: ignore + if pypy_version_info.releaselevel == "final": + pypy_version_info = pypy_version_info[:3] + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == "Jython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "IronPython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + + linux_distribution = distro.name(), distro.version(), distro.codename() + distro_infos: Dict[str, Any] = dict( + filter( + lambda x: x[1], + zip(["name", "version", "id"], linux_distribution), + ) + ) + libc = dict( + filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + ) + ) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_dist = get_default_environment().get_distribution("setuptools") + if setuptools_dist is not None: + data["setuptools_version"] = str(setuptools_dist.version) + + if shutil.which("rustc") is not None: + # If for any reason `rustc --version` fails, silently ignore it + try: + rustc_output = subprocess.check_output( + ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 + ) + except Exception: + pass + else: + if rustc_output.startswith(b"rustc "): + # The format of `rustc --version` is: + # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` + # We extract just the middle (1.52.1) part + data["rustc_version"] = rustc_output.split(b" ")[1].decode() + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. 
+ data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: Optional[Union[float, Tuple[float, float]]] = None, + verify: Union[bool, str] = True, + cert: Optional[Union[str, Tuple[str, str]]] = None, + proxies: Optional[Mapping[str, str]] = None, + ) -> Response: + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + # format the exception raised as a io.BytesIO object, + # to return a better error message: + resp.status_code = 404 + resp.reason = type(exc).__name__ + resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8")) + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self) -> None: + pass + + +class _SSLContextAdapterMixin: + """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters. + + The additional argument is forwarded directly to the pool manager. This allows us + to dynamically decide what SSL store to use at runtime, which is used to implement + the optional ``truststore`` backend. + """ + + def __init__( + self, + *, + ssl_context: Optional["SSLContext"] = None, + **kwargs: Any, + ) -> None: + self._ssl_context = ssl_context + super().__init__(**kwargs) + + def init_poolmanager( + self, + connections: int, + maxsize: int, + block: bool = DEFAULT_POOLBLOCK, + **pool_kwargs: Any, + ) -> "PoolManager": + if self._ssl_context is not None: + pool_kwargs.setdefault("ssl_context", self._ssl_context) + return super().init_poolmanager( # type: ignore[misc] + connections=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + +class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter): + pass + + +class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter): + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class PipSession(requests.Session): + timeout: Optional[int] = None + + def __init__( + self, + *args: Any, + retries: int = 0, + cache: Optional[str] = None, + trusted_hosts: Sequence[str] = (), + index_urls: Optional[List[str]] = None, + ssl_context: Optional["SSLContext"] = None, + **kwargs: Any, + ) -> None: + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. 
+ """ + super().__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = [] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 502 may be a transient error from a CDN like CloudFlare or CloudFront + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 502, 503, 520, 527], + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) # type: ignore + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ssl_context=ssl_context, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def update_index_urls(self, new_index_urls: List[str]) -> None: + """ + :param new_index_urls: New index urls to update the authentication + handler with. + """ + self.auth.index_urls = new_index_urls + + def add_trusted_host( + self, host: str, source: Optional[str] = None, suppress_logging: bool = False + ) -> None: + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. 
+ """ + if not suppress_logging: + msg = f"adding trusted host: {host!r}" + if source is not None: + msg += f" (from {source})" + logger.info(msg) + + parsed_host, parsed_port = parse_netloc(host) + if parsed_host is None: + raise ValueError(f"Trusted host URL must include a host part: {host!r}") + if (parsed_host, parsed_port) not in self.pip_trusted_origins: + self.pip_trusted_origins.append((parsed_host, parsed_port)) + + self.mount( + build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter + ) + self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) + if not parsed_port: + self.mount( + build_url_from_netloc(host, scheme="http") + ":", + self._trusted_host_adapter, + ) + # Mount wildcard ports for the same host. + self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) + + def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]: + yield from SECURE_ORIGINS + for host, port in self.pip_trusted_origins: + yield ("*", host, "*" if port is None else port) + + def is_secure_origin(self, location: Link) -> bool: + # Determine if this url used a secure transport mechanism + parsed = urllib.parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, + parsed.hostname, + parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit("+", 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address(origin_host or "") + network = ipaddress.ip_network(secure_host) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host + and origin_host.lower() != secure_host.lower() + and secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. + if ( + origin_port != secure_port + and secure_port != "*" + and secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. 
If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + # Allow setting a default proxies on a session + kwargs.setdefault("proxies", self.proxies) + + # Dispatch the actual request + return super().request(method, url, *args, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/utils.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/utils.py new file mode 100644 index 0000000..134848a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,96 @@ +from typing import Dict, Generator + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. +# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. +HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"} + + +def raise_for_status(resp: Response) -> None: + http_error_msg = "" + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. + try: + reason = resp.reason.decode("utf-8") + except UnicodeDecodeError: + reason = resp.reason.decode("iso-8859-1") + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = ( + f"{resp.status_code} Client Error: {reason} for url: {resp.url}" + ) + + elif 500 <= resp.status_code < 600: + http_error_msg = ( + f"{resp.status_code} Server Error: {reason} for url: {resp.url}" + ) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks( + response: Response, chunk_size: int = CONTENT_CHUNK_SIZE +) -> Generator[bytes, None, None]: + """Given a requests Response, provide the data chunks.""" + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. 
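# A sketch of the address-vs-network test at the heart of is_secure_origin
# (earlier in session.py): an origin is secure when its address falls inside
# one of the secure networks, such as the 127.0.0.0/8 loopback range.
import ipaddress

addr = ipaddress.ip_address("127.0.0.5")
net = ipaddress.ip_network("127.0.0.0/8")
print(addr in net)  # True -> loopback origins count as secure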
If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py b/myenv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 0000000..22ec8d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,62 @@ +"""xmlrpclib.Transport implementation +""" + +import logging +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING, Tuple + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status + +if TYPE_CHECKING: + from xmlrpc.client import _HostType, _Marshallable + + from _typeshed import SizedBuffer + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc.client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
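# A sketch of the streaming pattern response_chunks implements above, written
# against plain requests; the URL is hypothetical. decode_content=False keeps
# the bytes exactly as the server sent them, so checksums stay verifiable.
import requests

resp = requests.get("https://files.example.org/pkg.tar.gz", stream=True)
for chunk in resp.raw.stream(10240, decode_content=False):
    ...  # hash or write the exact on-the-wire bytes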
+ """ + + def __init__( + self, index_url: str, session: PipSession, use_datetime: bool = False + ) -> None: + super().__init__(use_datetime) + index_parts = urllib.parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request( + self, + host: "_HostType", + handler: str, + request_body: "SizedBuffer", + verbose: bool = False, + ) -> Tuple["_Marshallable", ...]: + assert isinstance(host, str) + parts = (self._scheme, host, handler, None, None, None) + url = urllib.parse.urlunparse(parts) + try: + headers = {"Content-Type": "text/xml"} + response = self._session.post( + url, + data=request_body, + headers=headers, + stream=True, + ) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, + url, + ) + raise diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8177136 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc new file mode 100644 index 0000000..7f67262 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc new file mode 100644 index 0000000..27c3f04 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc new file mode 100644 index 0000000..b149838 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f6e3bb2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc new file mode 100644 index 
0000000..de9bf77 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc new file mode 100644 index 0000000..2ecbbe6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc new file mode 100644 index 0000000..992444a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc new file mode 100644 index 0000000..99928b2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..dc7f379 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc new file mode 100644 index 0000000..08eb9ee Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc new file mode 100644 index 0000000..47f9329 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py new file mode 100644 index 0000000..3791932 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py @@ -0,0 +1,139 @@ +import contextlib +import hashlib +import logging +import os +from types import TracebackType +from typing import Dict, Generator, Optional, Set, Type, Union + +from pip._internal.models.link import Link +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes: str) -> Generator[None, None, None]: + target = os.environ + + # Save values from the target and change them. 
+ non_existent_marker = object() + saved_values: Dict[str, Union[object, str]] = {} + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. + for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_build_tracker() -> Generator["BuildTracker", None, None]: + root = os.environ.get("PIP_BUILD_TRACKER") + with contextlib.ExitStack() as ctx: + if root is None: + root = ctx.enter_context(TempDirectory(kind="build-tracker")).path + ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with BuildTracker(root) as tracker: + yield tracker + + +class TrackerId(str): + """Uniquely identifying string provided to the build tracker.""" + + +class BuildTracker: + """Ensure that an sdist cannot request itself as a setup requirement. + + When an sdist is prepared, it identifies its setup requirements in the + context of ``BuildTracker.track()``. If a requirement shows up recursively, this + raises an exception. + + This stops fork bombs embedded in malicious packages.""" + + def __init__(self, root: str) -> None: + self._root = root + self._entries: Dict[TrackerId, InstallRequirement] = {} + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self) -> "BuildTracker": + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.cleanup() + + def _entry_path(self, key: TrackerId) -> str: + hashed = hashlib.sha224(key.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req: InstallRequirement, key: TrackerId) -> None: + """Add an InstallRequirement to build tracking.""" + + # Get the file to write information about this requirement. + entry_path = self._entry_path(key) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. + try: + with open(entry_path) as fp: + contents = fp.read() + except FileNotFoundError: + pass + else: + message = "{} is already being built: {}".format(req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert key not in self._entries + + # Start tracking this requirement. + with open(entry_path, "w", encoding="utf-8") as fp: + fp.write(str(req)) + self._entries[key] = req + + logger.debug("Added %s to build tracker %r", req, self._root) + + def remove(self, req: InstallRequirement, key: TrackerId) -> None: + """Remove an InstallRequirement from build tracking.""" + + # Delete the created file and the corresponding entry. + os.unlink(self._entry_path(key)) + del self._entries[key] + + logger.debug("Removed %s from build tracker %r", req, self._root) + + def cleanup(self) -> None: + for key, req in list(self._entries.items()): + self.remove(req, key) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]: + """Ensure that `key` cannot install itself as a setup requirement. 
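# A sketch of how the tracker above is used; `req` stands in for an
# InstallRequirement built elsewhere, and the key is hypothetical.
with get_build_tracker() as tracker:
    with tracker.track(req, key="example-tracker-key"):
        ...  # build here; a nested track() of the same key raises LookupError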
+ + :raises LookupError: If `key` was already provided in a parent invocation of + the context introduced by this method.""" + tracker_id = TrackerId(key) + self.add(req, tracker_id) + yield + self.remove(req, tracker_id) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 0000000..c66ac35 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,39 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_metadata( + build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that BuildBackendHookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py new file mode 100644 index 0000000..27c69f0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py @@ -0,0 +1,41 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_editable_metadata( + build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 660. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that BuildBackendHookCaller implements a fallback for + # prepare_metadata_for_build_wheel/editable, so we don't have to + # consider the possibility that this hook doesn't exist. 
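# A sketch of the underlying PEP 517 hook call that generate_metadata wraps,
# using the public pyproject_hooks API directly; the source path and backend
# name are assumptions.
from pyproject_hooks import BuildBackendHookCaller

hooks = BuildBackendHookCaller(
    "/path/to/source", build_backend="setuptools.build_meta"
)
distinfo_dir = hooks.prepare_metadata_for_build_wheel("/tmp/metadata")
# -> name of the .dist-info directory created under /tmp/metadata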
+ runner = runner_with_spinner_message( + "Preparing editable metadata (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_legacy.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 0000000..e60988d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,74 @@ +"""Metadata generation logic for legacy source distributions. +""" + +import logging +import os + +from pip._internal.build_env import BuildEnvironment +from pip._internal.cli.spinners import open_spinner +from pip._internal.exceptions import ( + InstallationError, + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory: str) -> str: + """Find an .egg-info subdirectory in `directory`.""" + filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")] + + if not filenames: + raise InstallationError(f"No .egg-info directory found in {directory}") + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format(directory) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env: BuildEnvironment, + setup_py_path: str, + source_dir: str, + isolated: bool, + details: str, +) -> str: + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + "Running setup.py (path:%s) egg_info for package %s", + setup_py_path, + details, + ) + + egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + with open_spinner("Preparing metadata (setup.py)") as spinner: + try: + call_subprocess( + args, + cwd=source_dir, + command_desc="python setup.py egg_info", + spinner=spinner, + ) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + # Return the .egg-info directory. + return _find_egg_info(egg_info_dir) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 0000000..064811a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,37 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name: str, + backend: BuildBackendHookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building wheel for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error("Failed building wheel for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py new file mode 100644 index 0000000..719d69d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py @@ -0,0 +1,46 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_editable( + name: str, + backend: BuildBackendHookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 660 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building editable for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + wheel_name = backend.build_editable( + tempd, + metadata_directory=metadata_directory, + ) + except HookMissing as e: + logger.error( + "Cannot build editable %s because the build " + "backend does not have the %s hook", + name, + e, + ) + return None + except Exception: + logger.error("Failed building editable for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_legacy.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 0000000..c5f0492 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,102 @@ +import logging +import os.path +from typing import List, Optional + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args +from pip._internal.utils.subprocess import call_subprocess, format_command_args + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args: List[str], + command_output: str, +) -> str: + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = f"Command arguments: {command_desc}\n" + + if not command_output: + text += "Command output: None" + elif logger.getEffectiveLevel() > logging.DEBUG: + text += "Command output: [use --verbose to show]" + else: + if not command_output.endswith("\n"): + command_output += "\n" + text += f"Command output:\n{command_output}" + + return text + + +def get_legacy_build_wheel_path( + names: List[str], + temp_dir: str, + name: str, + command_args: List[str], + command_output: str, +) -> Optional[str]: + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. 
+ names = sorted(names) + if not names: + msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + "Legacy build of wheel for {!r} created more than one file.\n" + "Filenames (choosing first): {}\n" + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name: str, + setup_py_path: str, + source_dir: str, + global_options: List[str], + build_options: List[str], + tempd: str, +) -> Optional[str]: + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = f"Building wheel for {name} (setup.py)" + with open_spinner(spin_message) as spinner: + logger.debug("Destination directory: %s", tempd) + + try: + output = call_subprocess( + wheel_args, + command_desc="python setup.py bdist_wheel", + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error("Failed building wheel for %s", name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/check.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/check.py new file mode 100644 index 0000000..90c6a58 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,187 @@ +"""Validation of dependencies of packages +""" + +import logging +from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import LegacySpecifier +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.metadata import get_default_environment +from pip._internal.metadata.base import DistributionVersion +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated + +logger = logging.getLogger(__name__) + + +class PackageDetails(NamedTuple): + version: DistributionVersion + dependencies: List[Requirement] + + +# Shorthands +PackageSet = Dict[NormalizedName, PackageDetails] +Missing = Tuple[NormalizedName, Requirement] +Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement] + +MissingDict = Dict[NormalizedName, List[Missing]] +ConflictingDict = Dict[NormalizedName, List[Conflicting]] +CheckResult = Tuple[MissingDict, ConflictingDict] +ConflictDetails = Tuple[PackageSet, CheckResult] + + +def create_package_set_from_installed() -> Tuple[PackageSet, bool]: + """Converts a list of distributions into a PackageSet.""" + package_set = {} + problems = False + env = get_default_environment() + for dist in env.iter_installed_distributions(local_only=False, skip=()): + name = dist.canonical_name + try: + dependencies = list(dist.iter_dependencies()) + package_set[name] = 
PackageDetails(dist.version, dependencies) + except (OSError, ValueError) as e: + # Don't crash on unreadable or broken metadata. + logger.warning("Error parsing requirements for %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set( + package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None +) -> CheckResult: + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. + """ + + warn_legacy_versions_and_specifiers(package_set) + + missing = {} + conflicting = {} + + for package_name, package_detail in package_set.items(): + # Info about dependencies of package_name + missing_deps: Set[Missing] = set() + conflicting_deps: Set[Conflicting] = set() + + if should_ignore and should_ignore(package_name): + continue + + for req in package_detail.dependencies: + name = canonicalize_name(req.name) + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate({"extra": ""}) + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails: + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ), + ) + + +def _simulate_installation_of( + to_install: List[InstallRequirement], package_set: PackageSet +) -> Set[NormalizedName]: + """Computes the version of packages after installing to_install.""" + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_metadata_distribution() + name = dist.canonical_name + package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies())) + + installed.add(name) + + return installed + + +def _create_whitelist( + would_be_installed: Set[NormalizedName], package_set: PackageSet +) -> Set[NormalizedName]: + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].dependencies: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected + + +def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: + for project_name, package_details in package_set.items(): + if isinstance(package_details.version, LegacyVersion): + deprecated( + reason=( + f"{project_name} {package_details.version} " + f"has a non-standard 
version number."
+                ),
+                replacement=(
+                    f"to upgrade to a newer version of {project_name} "
+                    f"or contact the author to suggest that they "
+                    f"release a version with a conforming version number"
+                ),
+                issue=12063,
+                gone_in="24.1",
+            )
+        for dep in package_details.dependencies:
+            if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
+                deprecated(
+                    reason=(
+                        f"{project_name} {package_details.version} "
+                        f"has a non-standard dependency specifier {dep}."
+                    ),
+                    replacement=(
+                        f"to upgrade to a newer version of {project_name} "
+                        f"or contact the author to suggest that they "
+                        f"release a version with a conforming dependency specifier"
+                    ),
+                    issue=12063,
+                    gone_in="24.1",
+                )
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000..3544568
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,255 @@
+import collections
+import logging
+import os
+from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
+
+logger = logging.getLogger(__name__)
+
+
+class _EditableInfo(NamedTuple):
+    requirement: str
+    comments: List[str]
+
+
+def freeze(
+    requirement: Optional[List[str]] = None,
+    local_only: bool = False,
+    user_only: bool = False,
+    paths: Optional[List[str]] = None,
+    isolated: bool = False,
+    exclude_editable: bool = False,
+    skip: Container[str] = (),
+) -> Generator[str, None, None]:
+    installations: Dict[str, FrozenRequirement] = {}
+
+    dists = get_environment(paths).iter_installed_distributions(
+        local_only=local_only,
+        skip=(),
+        user_only=user_only,
+    )
+    for dist in dists:
+        req = FrozenRequirement.from_dist(dist)
+        if exclude_editable and req.editable:
+            continue
+        installations[req.canonical_name] = req
+
+    if requirement:
+        # the options that don't get turned into an InstallRequirement
+        # should only be emitted once, even if the same option is in multiple
+        # requirements files, so we need to keep track of what has been emitted
+        # so that we don't emit it again if it's seen again
+        emitted_options: Set[str] = set()
+        # keep track of which files a requirement is in so that we can
+        # give an accurate warning if a requirement appears multiple times.
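+        # (e.g. if both dev.txt and prod.txt pin "requests", req_files ends up
+        # as {"requests": ["dev.txt", "prod.txt"]} and the "included multiple
+        # times" warning below fires; file names are illustrative.)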
+ req_files: Dict[str, List[str]] = collections.defaultdict(list) + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if ( + not line.strip() + or line.strip().startswith("#") + or line.startswith( + ( + "-r", + "--requirement", + "-f", + "--find-links", + "-i", + "--index-url", + "--pre", + "--trusted-host", + "--process-dependency-links", + "--extra-index-url", + "--use-feature", + ) + ) + ): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith("-e") or line.startswith("--editable"): + if line.startswith("-e"): + line = line[2:].strip() + else: + line = line[len("--editable") :].strip().lstrip("=") + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub("", line).strip(), + isolated=isolated, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name(line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub("", line).strip(), + line_req.name, + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). + for name, files in req_files.items(): + if len(files) > 1: + logger.warning( + "Requirement %s included multiple times [%s]", + name, + ", ".join(sorted(set(files))), + ) + + yield ("## The following requirements were added by pip freeze:") + for installation in sorted(installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def _format_as_name_version(dist: BaseDistribution) -> str: + dist_version = dist.version + if isinstance(dist_version, Version): + return f"{dist.raw_name}=={dist_version}" + return f"{dist.raw_name}==={dist_version}" + + +def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: + """ + Compute and return values (req, comments) for use in + FrozenRequirement.from_dist(). 
+ """ + editable_project_location = dist.editable_project_location + assert editable_project_location + location = os.path.normcase(os.path.abspath(editable_project_location)) + + from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs + + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + display = _format_as_name_version(dist) + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', + display, + location, + ) + return _EditableInfo( + requirement=location, + comments=[f"# Editable install with no version control ({display})"], + ) + + vcs_name = type(vcs_backend).__name__ + + try: + req = vcs_backend.get_src_requirement(location, dist.raw_name) + except RemoteNotFoundError: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[f"# Editable {vcs_name} install with no remote ({display})"], + ) + except RemoteNotValidError as ex: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[ + f"# Editable {vcs_name} install ({display}) with either a deleted " + f"local remote or invalid URI:", + f"# '{ex.url}'", + ], + ) + except BadCommand: + logger.warning( + "cannot determine version of editable source in %s " + "(%s command not found in path)", + location, + vcs_backend.name, + ) + return _EditableInfo(requirement=location, comments=[]) + except InstallationError as exc: + logger.warning("Error when trying to get requirement for VCS system %s", exc) + else: + return _EditableInfo(requirement=req, comments=[]) + + logger.warning("Could not determine repository location of %s", location) + + return _EditableInfo( + requirement=location, + comments=["## !! Could not determine repository location"], + ) + + +class FrozenRequirement: + def __init__( + self, + name: str, + req: str, + editable: bool, + comments: Iterable[str] = (), + ) -> None: + self.name = name + self.canonical_name = canonicalize_name(name) + self.req = req + self.editable = editable + self.comments = comments + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement": + editable = dist.editable + if editable: + req, comments = _get_editable_info(dist) + else: + comments = [] + direct_url = dist.direct_url + if direct_url: + # if PEP 610 metadata is present, use it + req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) + else: + # name==version requirement + req = _format_as_name_version(dist) + + return cls(dist.raw_name, req, editable, comments=comments) + + def __str__(self) -> str: + req = self.req + if self.editable: + req = f"-e {req}" + return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 0000000..24d6a5d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. 
+""" diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1119bdb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc new file mode 100644 index 0000000..1885771 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..4852af8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 0000000..bebe24e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,46 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. +""" +import logging +from typing import Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess + +logger = logging.getLogger(__name__) + + +def install_editable( + *, + global_options: Sequence[str], + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, + name: str, + setup_py_path: str, + isolated: bool, + build_env: BuildEnvironment, + unpacked_source_directory: str, +) -> None: + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info("Running setup.py develop for %s", name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + command_desc="python setup.py develop", + cwd=unpacked_source_directory, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 0000000..f67180c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,734 @@ +"""Support for installing and building the "wheel" binary package format. 
+""" + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from email.message import Message +from itertools import chain, filterfalse, starmap +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Generator, + Iterable, + Iterator, + List, + NewType, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, +) +from zipfile import ZipFile, ZipInfo + +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import parse_wheel + +if TYPE_CHECKING: + from typing import Protocol + + class File(Protocol): + src_record_path: "RecordPath" + dest_path: str + changed: bool + + def save(self) -> None: + pass + + +logger = logging.getLogger(__name__) + +RecordPath = NewType("RecordPath", str) +InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + +def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]: + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") + return (digest, str(length)) + + +def csv_io_kwargs(mode: str) -> Dict[str, Any]: + """Return keyword arguments to properly open a CSV file + in the given mode. + """ + return {"mode": mode, "newline": "", "encoding": "utf-8"} + + +def fix_script(path: str) -> bool: + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, "rb") as script: + firstline = script.readline() + if not firstline.startswith(b"#!python"): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b"#!" + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, "wb") as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata: Message) -> bool: + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]: + console_scripts = {} + gui_scripts = {} + for entry_point in dist.iter_entry_points(): + if entry_point.group == "console_scripts": + console_scripts[entry_point.name] = entry_point.value + elif entry_point.group == "gui_scripts": + gui_scripts[entry_point.name] = entry_point.value + return console_scripts, gui_scripts + + +def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: + """Determine if any scripts are not on PATH and format a warning. 
+ Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set) + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(os.path.normpath(i)).rstrip(os.sep) + for i in os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append( + os.path.normcase(os.path.normpath(os.path.dirname(sys.executable))) + ) + warn_for: Dict[str, Set[str]] = { + parent_dir: scripts + for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs + } + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts: List[str] = sorted(dir_scripts) + if len(sorted_scripts) == 1: + start_text = f"script {sorted_scripts[0]} is" + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + f"The {start_text} installed in '{parent_dir}' which is not on PATH." + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows( + outrows: Iterable[InstalledCSVRow], +) -> List[Tuple[str, str, str]]: + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. 
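+    # (e.g. sorting ("a.py", "", 10) against ("a.py", "", "") would otherwise
+    # compare int 10 with str "" and raise TypeError.)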
+ # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (record_path, hash_, str(size)) for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str: + return os.path.join(lib_dir, record_path) + + +def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower(): + path = os.path.relpath(path, lib_dir) + + path = path.replace(os.path.sep, "/") + return cast("RecordPath", path) + + +def get_csv_rows_for_installed( + old_csv_rows: List[List[str]], + installed: Dict[RecordPath, RecordPath], + changed: Set[RecordPath], + generated: List[str], + lib_dir: str, +) -> List[InstalledCSVRow]: + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows: List[InstalledCSVRow] = [] + for row in old_csv_rows: + if len(row) > 3: + logger.warning("RECORD line has more than three elements: %s", row) + old_record_path = cast("RecordPath", row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir)) + else: + digest = row[1] if len(row) > 1 else "" + length = row[2] if len(row) > 2 else "" + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + return installed_rows + [ + (installed_record_path, "", "") for installed_record_path in installed.values() + ] + + +def get_console_script_specs(console: Dict[str, str]) -> List[str]: + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. 
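+    # (For example, CPython's `python -m ensurepip --altinstall` exports
+    # ENSUREPIP_OPTIONS=altinstall before invoking pip.)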
+ # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop("pip", None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("pip = " + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}") + + scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop("easy_install", None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("easy_install = " + easy_install_script) + + scripts_to_generate.append( + f"easy_install-{get_major_minor_version()} = {easy_install_script}" + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap("{} = {}".format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile: + def __init__( + self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile + ) -> None: + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def _getinfo(self) -> ZipInfo: + return self._zip_file.getinfo(self.src_record_path) + + def save(self) -> None: + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + zipinfo = self._getinfo() + + with self._zip_file.open(zipinfo) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile: + def __init__(self, file: "File") -> None: + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self) -> None: + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point: str) -> None: + super().__init__( + f"Invalid script entry point: {entry_point} - A callable " + "suffix is required. 
Cf https://packaging.python.org/"
+            "specifications/entry-points/#use-for-scripts for more "
+            "information."
+        )
+
+
+def _raise_for_invalid_entrypoint(specification: str) -> None:
+    entry = get_export_entry(specification)
+    if entry is not None and entry.suffix is None:
+        raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+    def make(
+        self, specification: str, options: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        _raise_for_invalid_entrypoint(specification)
+        return super().make(specification, options)
+
+
+def _install_wheel(
+    name: str,
+    wheel_zip: ZipFile,
+    wheel_path: str,
+    scheme: Scheme,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    """Install a wheel.
+
+    :param name: Name of the project to install
+    :param wheel_zip: open ZipFile for wheel being installed
+    :param scheme: Distutils scheme dictating the install directories
+    :param req_description: String used in place of the requirement, for
+        logging
+    :param pycompile: Whether to byte-compile installed Python files
+    :param warn_script_location: Whether to check that scripts are installed
+        into a directory on PATH
+    :raises UnsupportedWheel:
+        * when the directory holds an unpacked wheel with incompatible
+          Wheel-Version
+        * when the .dist-info dir does not match the wheel
+    """
+    info_dir, metadata = parse_wheel(wheel_zip, name)
+
+    if wheel_root_is_purelib(metadata):
+        lib_dir = scheme.purelib
+    else:
+        lib_dir = scheme.platlib
+
+    # Record details of the files moved
+    #   installed = files copied from the wheel to the destination
+    #   changed = files changed while installing (scripts #! line typically)
+    #   generated = files newly generated during the install (script wrappers)
+    installed: Dict[RecordPath, RecordPath] = {}
+    changed: Set[RecordPath] = set()
+    generated: List[str] = []
+
+    def record_installed(
+        srcfile: RecordPath, destfile: str, modified: bool = False
+    ) -> None:
+        """Map archive RECORD paths to installation RECORD paths."""
+        newpath = _fs_to_record_path(destfile, lib_dir)
+        installed[srcfile] = newpath
+        if modified:
+            changed.add(newpath)
+
+    def is_dir_path(path: RecordPath) -> bool:
+        return path.endswith("/")
+
+    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+        if not is_within_directory(dest_dir_path, target_path):
+            message = (
+                "The wheel {!r} has a file {!r} trying to install"
+                " outside the target directory {!r}"
+            )
+            raise InstallationError(
+                message.format(wheel_path, target_path, dest_dir_path)
+            )
+
+    def root_scheme_file_maker(
+        zip_file: ZipFile, dest: str
+    ) -> Callable[[RecordPath], "File"]:
+        def make_root_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            dest_path = os.path.join(dest, normed_path)
+            assert_no_path_traversal(dest, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_root_scheme_file
+
+    def data_scheme_file_maker(
+        zip_file: ZipFile, scheme: Scheme
+    ) -> Callable[[RecordPath], "File"]:
+        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+        def make_data_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            try:
+                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+            except ValueError:
+                message = (
+                    "Unexpected file in {}: {!r}. .data directory contents"
+                    " should be named like: '<scheme key>/<path>'."
+ ).format(wheel_path, record_path) + raise InstallationError(message) + + try: + scheme_path = scheme_paths[scheme_key] + except KeyError: + valid_scheme_keys = ", ".join(sorted(scheme_paths)) + message = ( + "Unknown scheme key used in {}: {} (for file {!r}). .data" + " directory contents should be in subdirectories named" + " with a valid scheme key ({})" + ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) + raise InstallationError(message) + + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path: RecordPath) -> bool: + return path.split("/", 1)[0].endswith(".data") + + paths = cast(List[RecordPath], wheel_zip.namelist()) + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) + + make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) + files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path: RecordPath) -> bool: + parts = path.split("/", 2) + return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = get_wheel_distribution( + FilesystemWheel(wheel_path), + canonicalize_name(name), + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file: "File") -> bool: + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith(".exe"): + matchname = name[:-4] + elif name.lower().endswith("-script.py"): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return matchname in console or matchname in gui + + script_scheme_files: Iterator[File] = map( + make_data_scheme_file, script_scheme_paths + ) + script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths() -> Generator[str, None, None]: + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). + # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. 
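+        # (e.g. a hypothetical wheel shipping "pkg/util.py" both at the
+        # archive root and under "demo-1.0.data/purelib/" resolves to a single
+        # installed path, so it is byte-compiled only once.)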
+ for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith(".py"): + continue + yield full_installed_path + + def pyc_output_path(path: str) -> str: + """Return the path the pyc file would have been written to.""" + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + for path in pyc_source_file_paths(): + success = compileall.compile_file(path, force=True, quiet=True) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {""} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, "INSTALLER") + with _generate_file(installer_path) as installer_file: + installer_file.write(b"pip\n") + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, "REQUESTED") + with open(requested_path, "wb"): + pass + generated.append(requested_path) + + record_text = distribution.read_text("RECORD") + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir, + ) + + # Record details of all files installed + record_path = os.path.join(dest_info_dir, "RECORD") + + with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: + # Explicitly cast to typing.IO[str] as a workaround for the mypy 
error: + # "writer" has incompatible type "BinaryIO"; expected "_Writer" + writer = csv.writer(cast("IO[str]", record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description: str) -> Generator[None, None, None]: + try: + yield + except InstallationError as e: + message = f"For req: {req_description}. {e.args[0]}" + raise InstallationError(message) from e + + +def install_wheel( + name: str, + wheel_path: str, + scheme: Scheme, + req_description: str, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: Optional[DirectUrl] = None, + requested: bool = False, +) -> None: + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py b/myenv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 0000000..956717d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,730 @@ +"""Prepares a distribution for installation +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import mimetypes +import os +import shutil +from pathlib import Path +from typing import Dict, Iterable, List, Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + MetadataInconsistent, + NetworkConnectionError, + VcsHashUnsupported, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_metadata_distribution +from pip._internal.models.direct_url import ArchiveInfo +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.network.download import BatchDownloader, Downloader +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.network.session import PipSession +from pip._internal.operations.build.build_tracker import BuildTracker +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.direct_url_helpers import ( + direct_url_for_editable, + direct_url_from_link, +) +from pip._internal.utils.hashes import Hashes, MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + display_path, + hash_file, + hide_url, + redact_auth_from_requirement, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +logger = getLogger(__name__) + + +def _get_prepared_distribution( + req: InstallRequirement, + build_tracker: BuildTracker, + finder: PackageFinder, + build_isolation: bool, + check_build_deps: bool, +) -> BaseDistribution: + """Prepare a distribution for installation.""" + abstract_dist = make_distribution_for_install_requirement(req) + tracker_id = abstract_dist.build_tracker_id + if 
tracker_id is not None: + with build_tracker.track(req, tracker_id): + abstract_dist.prepare_distribution_metadata( + finder, build_isolation, check_build_deps + ) + return abstract_dist.get_metadata_distribution() + + +def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity) + + +class File: + def __init__(self, path: str, content_type: Optional[str]) -> None: + self.path = path + if content_type is None: + self.content_type = mimetypes.guess_type(path)[0] + else: + self.content_type = content_type + + +def get_http_url( + link: Link, + download: Downloader, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> File: + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = None + else: + # let's download to a tmp dir + from_path, content_type = download(link, temp_dir.path) + if hashes: + hashes.check_against_path(from_path) + + return File(from_path, content_type) + + +def get_file_url( + link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None +) -> File: + """Get file and optionally check its hash.""" + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + return File(from_path, None) + + +def unpack_url( + link: Link, + location: str, + download: Downloader, + verbosity: int, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> Optional[File]: + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location, verbosity=verbosity) + return None + + assert not link.is_existing_dir() + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + download, + download_dir, + hashes=hashes, + ) + + # unpack the archive to the build dir location. 
even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) + + return file + + +def _check_download_dir( + link: Link, + download_dir: str, + hashes: Optional[Hashes], + warn_on_hash_mismatch: bool = True, +) -> Optional[str]: + """Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info("File was already downloaded %s", download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + if warn_on_hash_mismatch: + logger.warning( + "Previously-downloaded file %s has bad hash. Re-downloading.", + download_path, + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer: + """Prepares a Requirement""" + + def __init__( + self, + build_dir: str, + download_dir: Optional[str], + src_dir: str, + build_isolation: bool, + check_build_deps: bool, + build_tracker: BuildTracker, + session: PipSession, + progress_bar: str, + finder: PackageFinder, + require_hashes: bool, + use_user_site: bool, + lazy_wheel: bool, + verbosity: int, + legacy_resolver: bool, + ) -> None: + super().__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.build_tracker = build_tracker + self._session = session + self._download = Downloader(session, progress_bar) + self._batch_download = BatchDownloader(session, progress_bar) + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Is build isolation allowed? + self.build_isolation = build_isolation + + # Should check build dependencies? + self.check_build_deps = check_build_deps + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + # Should wheels be downloaded lazily? + self.use_lazy_wheel = lazy_wheel + + # How verbose should underlying tooling be? + self.verbosity = verbosity + + # Are we using the legacy resolver? + self.legacy_resolver = legacy_resolver + + # Memoized downloaded files, as mapping of url: path. 
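+        # e.g. {"https://example.com/demo-1.0.tar.gz":
+        #       "/tmp/pip-unpack-xyz/demo-1.0.tar.gz"} (illustrative values),
+        # so each link is fetched at most once per run.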
+ self._downloaded: Dict[str, str] = {} + + # Previous "header" printed for a link-based InstallRequirement + self._previous_requirement_header = ("", "") + + def _log_preparing_link(self, req: InstallRequirement) -> None: + """Provide context for the requirement being prepared.""" + if req.link.is_file and not req.is_wheel_from_cache: + message = "Processing %s" + information = str(display_path(req.link.file_path)) + else: + message = "Collecting %s" + information = redact_auth_from_requirement(req.req) if req.req else str(req) + + # If we used req.req, inject requirement source if available (this + # would already be included if we used req directly) + if req.req and req.comes_from: + if isinstance(req.comes_from, str): + comes_from: Optional[str] = req.comes_from + else: + comes_from = req.comes_from.from_path() + if comes_from: + information += f" (from {comes_from})" + + if (message, information) != self._previous_requirement_header: + self._previous_requirement_header = (message, information) + logger.info(message, information) + + if req.is_wheel_from_cache: + with indent_log(): + logger.info("Using cached %s", req.link.filename) + + def _ensure_link_req_src_dir( + self, req: InstallRequirement, parallel_builds: bool + ) -> None: + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + if req.link.is_existing_dir(): + # build local directories in-tree + req.source_dir = req.link.file_path + return + + # We always delete unpacked sdists after pip runs. + req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) + req.ensure_pristine_source_checkout() + + def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) + if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if not req.is_direct and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. 
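+        # (e.g. a hypothetical `pip install --require-hashes demo==1.0` with
+        # no --hash option then fails with a HashMissing error that reports
+        # the sha256 the user could pin.)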
+ return req.hashes(trust_internet=False) or MissingHashes() + + def _fetch_metadata_only( + self, + req: InstallRequirement, + ) -> Optional[BaseDistribution]: + if self.legacy_resolver: + logger.debug( + "Metadata-only fetching is not used in the legacy resolver", + ) + return None + if self.require_hashes: + logger.debug( + "Metadata-only fetching is not used as hash checking is required", + ) + return None + # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable. + return self._fetch_metadata_using_link_data_attr( + req + ) or self._fetch_metadata_using_lazy_wheel(req.link) + + def _fetch_metadata_using_link_data_attr( + self, + req: InstallRequirement, + ) -> Optional[BaseDistribution]: + """Fetch metadata from the data-dist-info-metadata attribute, if possible.""" + # (1) Get the link to the metadata file, if provided by the backend. + metadata_link = req.link.metadata_link() + if metadata_link is None: + return None + assert req.req is not None + logger.verbose( + "Obtaining dependency information for %s from %s", + req.req, + metadata_link, + ) + # (2) Download the contents of the METADATA file, separate from the dist itself. + metadata_file = get_http_url( + metadata_link, + self._download, + hashes=metadata_link.as_hashes(), + ) + with open(metadata_file.path, "rb") as f: + metadata_contents = f.read() + # (3) Generate a dist just from those file contents. + metadata_dist = get_metadata_distribution( + metadata_contents, + req.link.filename, + req.req.name, + ) + # (4) Ensure the Name: field from the METADATA file matches the name from the + # install requirement. + # + # NB: raw_name will fall back to the name from the install requirement if + # the Name: field is not present, but it's noted in the raw_name docstring + # that that should NEVER happen anyway. + if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name): + raise MetadataInconsistent( + req, "Name", req.req.name, metadata_dist.raw_name + ) + return metadata_dist + + def _fetch_metadata_using_lazy_wheel( + self, + link: Link, + ) -> Optional[BaseDistribution]: + """Fetch metadata using lazy wheel, if possible.""" + # --use-feature=fast-deps must be provided. + if not self.use_lazy_wheel: + return None + if link.is_file or not link.is_wheel: + logger.debug( + "Lazy wheel is not used as %r does not point to a remote wheel", + link, + ) + return None + + wheel = Wheel(link.filename) + name = canonicalize_name(wheel.name) + logger.info( + "Obtaining dependency information from %s %s", + name, + wheel.version, + ) + url = link.url.split("#", 1)[0] + try: + return dist_from_wheel_url(name, url, self._session) + except HTTPRangeRequestUnsupported: + logger.debug("%s does not support range requests", url) + return None + + def _complete_partial_requirements( + self, + partially_downloaded_reqs: Iterable[InstallRequirement], + parallel_builds: bool = False, + ) -> None: + """Download any requirements which were only fetched by metadata.""" + # Download to a temporary directory. These will be copied over as + # needed for downstream 'download', 'wheel', and 'install' commands. + temp_dir = TempDirectory(kind="unpack", globally_managed=True).path + + # Map each link to the requirement that owns it. This allows us to set + # `req.local_file_path` on the appropriate requirement after passing + # all the links at once into BatchDownloader. 
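+        # (Illustration with made-up values:
+        # {Link("https://example.com/demo-1.0-py3-none-any.whl"): <demo req>}.)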
+        links_to_fully_download: Dict[Link, InstallRequirement] = {}
+        for req in partially_downloaded_reqs:
+            assert req.link
+            links_to_fully_download[req.link] = req
+
+        batch_download = self._batch_download(
+            links_to_fully_download.keys(),
+            temp_dir,
+        )
+        for link, (filepath, _) in batch_download:
+            logger.debug("Downloading link %s to %s", link, filepath)
+            req = links_to_fully_download[link]
+            # Record the downloaded file path so wheel reqs can extract a Distribution
+            # in .get_dist().
+            req.local_file_path = filepath
+            # Record that the file is downloaded so we don't do it again in
+            # _prepare_linked_requirement().
+            self._downloaded[req.link.url] = filepath
+
+            # If this is an sdist, we need to unpack it after downloading, but the
+            # .source_dir won't be set up until we are in _prepare_linked_requirement().
+            # Add the downloaded archive to the install requirement to unpack after
+            # preparing the source dir.
+            if not req.is_wheel:
+                req.needs_unpacked_archive(Path(filepath))
+
+        # This step is necessary to ensure all lazy wheels are processed
+        # successfully by the 'download', 'wheel', and 'install' commands.
+        for req in partially_downloaded_reqs:
+            self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool = False
+    ) -> BaseDistribution:
+        """Prepare a requirement to be obtained from req.link."""
+        assert req.link
+        self._log_preparing_link(req)
+        with indent_log():
+            # Check if the relevant file is already available
+            # in the download directory
+            file_path = None
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(
+                    req.link,
+                    self.download_dir,
+                    hashes,
+                    # When a locally built wheel has been found in cache, we don't warn
+                    # about re-downloading when the already downloaded wheel hash does
+                    # not match. This is because the hash must be checked against the
+                    # original link, not the cached link. In that case the already
+                    # downloaded file will be removed and re-fetched from cache (which
+                    # implies a hash check against the cache entry's origin.json).
+                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
+                )
+
+            if file_path is not None:
+                # The file is already available, so mark it as downloaded
+                self._downloaded[req.link.url] = file_path
+            else:
+                # The file is not available, attempt to fetch only metadata
+                metadata_dist = self._fetch_metadata_only(req)
+                if metadata_dist is not None:
+                    req.needs_more_preparation = True
+                    return metadata_dist
+
+            # None of the optimizations worked, fully prepare the requirement
+            return self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirements_more(
+        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+    ) -> None:
+        """Prepare linked requirements more, if needed."""
+        reqs = [req for req in reqs if req.needs_more_preparation]
+        for req in reqs:
+            # Determine if any of these requirements were already downloaded.
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(req.link, self.download_dir, hashes)
+                if file_path is not None:
+                    self._downloaded[req.link.url] = file_path
+                    req.needs_more_preparation = False
+
+        # Prepare requirements we found were already downloaded for some
+        # reason. The other downloads will be completed separately.
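+        # (e.g. a req whose metadata came from a PEP 658 .metadata file still
+        # needs its actual wheel fetched here before it can be installed.)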
+ partially_downloaded_reqs: List[InstallRequirement] = [] + for req in reqs: + if req.needs_more_preparation: + partially_downloaded_reqs.append(req) + else: + self._prepare_linked_requirement(req, parallel_builds) + + # TODO: separate this part out from RequirementPreparer when the v1 + # resolver can be removed! + self._complete_partial_requirements( + partially_downloaded_reqs, + parallel_builds=parallel_builds, + ) + + def _prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool + ) -> BaseDistribution: + assert req.link + link = req.link + + hashes = self._get_linked_req_hashes(req) + + if hashes and req.is_wheel_from_cache: + assert req.download_info is not None + assert link.is_wheel + assert link.is_file + # We need to verify hashes, and we have found the requirement in the cache + # of locally built wheels. + if ( + isinstance(req.download_info.info, ArchiveInfo) + and req.download_info.info.hashes + and hashes.has_one_of(req.download_info.info.hashes) + ): + # At this point we know the requirement was built from a hashable source + # artifact, and we verified that the cache entry's hash of the original + # artifact matches one of the hashes we expect. We don't verify hashes + # against the cached wheel, because the wheel is not the original. + hashes = None + else: + logger.warning( + "The hashes of the source archive found in cache entry " + "don't match, ignoring cached built wheel " + "and re-downloading source." + ) + req.link = req.cached_wheel_source_link + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + + if link.is_existing_dir(): + local_file = None + elif link.url not in self._downloaded: + try: + local_file = unpack_url( + link, + req.source_dir, + self._download, + self.verbosity, + self.download_dir, + hashes, + ) + except NetworkConnectionError as exc: + raise InstallationError( + f"Could not install requirement {req} because of HTTP " + f"error {exc} for URL {link}" + ) + else: + file_path = self._downloaded[link.url] + if hashes: + hashes.check_against_path(file_path) + local_file = File(file_path, content_type=None) + + # If download_info is set, we got it from the wheel cache. + if req.download_info is None: + # Editables don't go through this function (see + # prepare_editable_requirement). + assert not req.editable + req.download_info = direct_url_from_link(link, req.source_dir) + # Make sure we have a hash in download_info. If we got it as part of the + # URL, it will have been verified and we can rely on it. Otherwise we + # compute it from the downloaded file. + # FIXME: https://github.com/pypa/pip/issues/11943 + if ( + isinstance(req.download_info.info, ArchiveInfo) + and not req.download_info.info.hashes + and local_file + ): + hash = hash_file(local_file.path)[0].hexdigest() + # We populate info.hash for backward compatibility. + # This will automatically populate info.hashes. + req.download_info.info.hash = f"sha256={hash}" + + # For use in later processing, + # preserve the file path on the requirement. + if local_file: + req.local_file_path = local_file.path + + dist = _get_prepared_distribution( + req, + self.build_tracker, + self.finder, + self.build_isolation, + self.check_build_deps, + ) + return dist + + def save_linked_requirement(self, req: InstallRequirement) -> None: + assert self.download_dir is not None + assert req.link is not None + link = req.link + if link.is_vcs or (link.is_existing_dir() and req.editable): + # Make a .zip of the source_dir we already created. 
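The cache branch above only trusts a locally built wheel when the recorded hash of its original source artifact matches one of the expected hashes; the cached wheel itself is never hashed. The underlying comparison boils down to streaming a file through `hashlib`, roughly like this sketch (a simplified analogue of pip's `Hashes` helper, not its actual code):

```python
import hashlib

def file_matches_one_of(path: str, expected: dict) -> bool:
    """`expected` maps an algorithm name (e.g. 'sha256') to allowed hex digests."""
    for algo, digests in expected.items():
        h = hashlib.new(algo)
        with open(path, "rb") as f:
            # Stream in chunks so large archives are never fully in memory.
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                h.update(chunk)
        if h.hexdigest() in digests:
            return True
    return False
```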
+ req.archive(self.download_dir) + return + + if link.is_existing_dir(): + logger.debug( + "Not copying link to destination directory " + "since it is a directory: %s", + link, + ) + return + if req.local_file_path is None: + # No distribution was downloaded for this requirement. + return + + download_location = os.path.join(self.download_dir, link.filename) + if not os.path.exists(download_location): + shutil.copy(req.local_file_path, download_location) + download_path = display_path(download_location) + logger.info("Saved %s", download_path) + + def prepare_editable_requirement( + self, + req: InstallRequirement, + ) -> BaseDistribution: + """Prepare an editable requirement.""" + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info("Obtaining %s", req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + f"The editable requirement {req} cannot be installed when " + "requiring hashes, because there is no single file to " + "hash." + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable() + assert req.source_dir + req.download_info = direct_url_for_editable(req.unpacked_source_directory) + + dist = _get_prepared_distribution( + req, + self.build_tracker, + self.finder, + self.build_isolation, + self.check_build_deps, + ) + + req.check_if_exists(self.use_user_site) + + return dist + + def prepare_installed_requirement( + self, + req: InstallRequirement, + skip_reason: str, + ) -> BaseDistribution: + """Prepare an already-installed requirement.""" + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + f"is set to {req.satisfied_by}" + ) + logger.info( + "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + "Since it is already installed, we are trusting this " + "package without checking its hash. To ensure a " + "completely repeatable environment, install into an " + "empty virtualenv." + ) + return InstalledDistribution(req).get_metadata_distribution() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/pyproject.py b/myenv/lib/python3.12/site-packages/pip/_internal/pyproject.py new file mode 100644 index 0000000..8de36b8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/pyproject.py @@ -0,0 +1,179 @@ +import importlib.util +import os +from collections import namedtuple +from typing import Any, List, Optional + +from pip._vendor import tomli +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement + +from pip._internal.exceptions import ( + InstallationError, + InvalidPyProjectBuildRequires, + MissingPyProjectBuildRequires, +) + + +def _is_list_of_str(obj: Any) -> bool: + return isinstance(obj, list) and all(isinstance(item, str) for item in obj) + + +def make_pyproject_path(unpacked_source_directory: str) -> str: + return os.path.join(unpacked_source_directory, "pyproject.toml") + + +BuildSystemDetails = namedtuple( + "BuildSystemDetails", ["requires", "backend", "check", "backend_path"] +) + + +def load_pyproject_toml( + use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str +) -> Optional[BuildSystemDetails]: + """Load the pyproject.toml file. + + Parameters: + use_pep517 - Has the user requested PEP 517 processing? None + means the user hasn't explicitly specified. 
+ pyproject_toml - Location of the project's pyproject.toml file + setup_py - Location of the project's setup.py file + req_name - The name of the requirement we're processing (for + error reporting) + + Returns: + None if we should use the legacy code path, otherwise a tuple + ( + requirements from pyproject.toml, + name of PEP 517 backend, + requirements we should check are installed after setting + up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. + ) + """ + has_pyproject = os.path.isfile(pyproject_toml) + has_setup = os.path.isfile(setup_py) + + if not has_pyproject and not has_setup: + raise InstallationError( + f"{req_name} does not appear to be a Python project: " + f"neither 'setup.py' nor 'pyproject.toml' found." + ) + + if has_pyproject: + with open(pyproject_toml, encoding="utf-8") as f: + pp_toml = tomli.loads(f.read()) + build_system = pp_toml.get("build-system") + else: + build_system = None + + # The following cases must use PEP 517 + # We check for use_pep517 being non-None and falsey because that means + # the user explicitly requested --no-use-pep517. The value 0 as + # opposed to False can occur when the value is provided via an + # environment variable or config file option (due to the quirk of + # strtobool() returning an integer in pip's configuration code). + if has_pyproject and not has_setup: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project does not have a setup.py" + ) + use_pep517 = True + elif build_system and "build-backend" in build_system: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project specifies a build backend of {} " + "in pyproject.toml".format(build_system["build-backend"]) + ) + use_pep517 = True + + # If we haven't worked out whether to use PEP 517 yet, + # and the user hasn't explicitly stated a preference, + # we do so if the project has a pyproject.toml file + # or if we cannot import setuptools or wheels. + + # We fallback to PEP 517 when without setuptools or without the wheel package, + # so setuptools can be installed as a default build backend. + # For more info see: + # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9 + # https://github.com/pypa/pip/issues/8559 + elif use_pep517 is None: + use_pep517 = ( + has_pyproject + or not importlib.util.find_spec("setuptools") + or not importlib.util.find_spec("wheel") + ) + + # At this point, we know whether we're going to use PEP 517. + assert use_pep517 is not None + + # If we're using the legacy code path, there is nothing further + # for us to do here. + if not use_pep517: + return None + + if build_system is None: + # Either the user has a pyproject.toml with no build-system + # section, or the user has no pyproject.toml, but has opted in + # explicitly via --use-pep517. + # In the absence of any explicit backend specification, we + # assume the setuptools backend that most closely emulates the + # traditional direct setup.py execution, and require wheel and + # a version of setuptools that supports that backend. + + build_system = { + "requires": ["setuptools>=40.8.0"], + "build-backend": "setuptools.build_meta:__legacy__", + } + + # If we're using PEP 517, we have build system information (either + # from pyproject.toml, or defaulted by the code above). 
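When no build-system table is present, the logic above substitutes the legacy setuptools backend. A sketch of how that fallback could be exercised with the standard library's `tomllib` (Python 3.11+; the path and table contents are illustrative, and this is not pip's own loader):

```python
import tomllib  # standard library on Python 3.11+

DEFAULT_BUILD_SYSTEM = {
    "requires": ["setuptools>=40.8.0"],
    "build-backend": "setuptools.build_meta:__legacy__",
}

def read_build_system(pyproject_path: str) -> dict:
    # A missing file and a missing [build-system] table both fall back to
    # the legacy setuptools default, mirroring the logic above.
    try:
        with open(pyproject_path, "rb") as f:
            data = tomllib.load(f)
    except FileNotFoundError:
        return DEFAULT_BUILD_SYSTEM
    return data.get("build-system", DEFAULT_BUILD_SYSTEM)
```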
+ # Note that at this point, we do not know if the user has actually + # specified a backend, though. + assert build_system is not None + + # Ensure that the build-system section in pyproject.toml conforms + # to PEP 518. + + # Specifying the build-system table but not the requires key is invalid + if "requires" not in build_system: + raise MissingPyProjectBuildRequires(package=req_name) + + # Error out if requires is not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InvalidPyProjectBuildRequires( + package=req_name, + reason="It is not a list of strings.", + ) + + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement as error: + raise InvalidPyProjectBuildRequires( + package=req_name, + reason=f"It contains an invalid requirement: {requirement!r}", + ) from error + + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) + check: List[str] = [] + if backend is None: + # If the user didn't specify a backend, we assume they want to use + # the setuptools backend. But we can't be sure they have included + # a version of setuptools which supplies the backend. So we + # make a note to check that this requirement is present once + # we have set up the environment. + # This is quite a lot of work to check for a very specific case. But + # the problem is, that case is potentially quite common - projects that + # adopted PEP 518 early for the ability to specify requirements to + # execute setup.py, but never considered needing to mention the build + # tools themselves. The original PEP 518 code had a similar check (but + # implemented in a different way). + backend = "setuptools.build_meta:__legacy__" + check = ["setuptools>=40.8.0"] + + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 0000000..16de903 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,92 @@ +import collections +import logging +from typing import Generator, List, Optional, Sequence, Tuple + +from pip._internal.utils.logging import indent_log + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +__all__ = [ + "RequirementSet", + "InstallRequirement", + "parse_requirements", + "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +class InstallationResult: + def __init__(self, name: str) -> None: + self.name = name + + def __repr__(self) -> str: + return f"InstallationResult(name={self.name!r})" + + +def _validate_requirements( + requirements: List[InstallRequirement], +) -> Generator[Tuple[str, InstallRequirement], None, None]: + for req in requirements: + assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +def install_given_reqs( + requirements: List[InstallRequirement], + global_options: Sequence[str], + root: Optional[str], + home: Optional[str], + prefix: Optional[str], + warn_script_location: bool, + use_user_site: bool, + pycompile: bool, +) -> List[InstallationResult]: + """ + Install everything in the given list. 
+ + (to be called after having downloaded and unpacked the packages) + """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) + + if to_install: + logger.info( + "Installing collected packages: %s", + ", ".join(to_install.keys()), + ) + + installed = [] + + with indent_log(): + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info("Attempting uninstall: %s", req_name) + with indent_log(): + uninstalled_pathset = requirement.uninstall(auto_confirm=True) + else: + uninstalled_pathset = None + + try: + requirement.install( + global_options, + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, + ) + except Exception: + # if install did not succeed, rollback previous uninstall + if uninstalled_pathset and not requirement.install_succeeded: + uninstalled_pathset.rollback() + raise + else: + if uninstalled_pathset and requirement.install_succeeded: + uninstalled_pathset.commit() + + installed.append(InstallationResult(req_name)) + + return installed diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..23dec75 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc new file mode 100644 index 0000000..5a2886c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc new file mode 100644 index 0000000..b7a158e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc new file mode 100644 index 0000000..a9cccfa Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc new file mode 100644 index 0000000..b305d99 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc new file mode 100644 index 0000000..9bef7ef Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/constructors.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/constructors.py new file mode 100644 index 0000000..7e2d0e5 --- /dev/null +++ 
b/myenv/lib/python3.12/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,576 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size,
+so moving them, and the support code dedicated to them, outside of that class
+makes the rest of the code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+import copy
+import logging
+import os
+import re
+from typing import Collection, Dict, List, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req.req_file import ParsedRequirement
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import is_installable_dir
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import is_url, vcs
+
+__all__ = [
+    "install_req_from_editable",
+    "install_req_from_line",
+    "parse_editable",
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
+    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
+    extras = None
+    if m:
+        path_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        path_no_extras = path
+
+    return path_no_extras, extras
+
+
+def convert_extras(extras: Optional[str]) -> Set[str]:
+    if not extras:
+        return set()
+    return get_requirement("placeholder" + extras.lower()).extras
+
+
+def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
+    """
+    Returns a new requirement based on the given one, with the supplied extras. If the
+    given requirement already has extras those are replaced (or dropped if no new extras
+    are given).
+    """
+    match: Optional[re.Match[str]] = re.fullmatch(
+        # see https://peps.python.org/pep-0508/#complete-grammar
+        r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
+        str(req),
+        flags=re.ASCII,
+    )
+    # ireq.req is a valid requirement so the regex should always match
+    assert (
+        match is not None
+    ), f"regex match on requirement {req} failed, this should never happen"
+    pre: Optional[str] = match.group(1)
+    post: Optional[str] = match.group(3)
+    assert (
+        pre is not None and post is not None
+    ), f"regex group selection for requirement {req} failed, this should never happen"
+    extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
+    return Requirement(f"{pre}{extras}{post}")
+
+
+def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
+    """Parses an editable requirement into:
+        - a requirement name
+        - a URL
+        - extras
+    Accepted requirements:
+        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+        .[some_extra]
+    """
+
+    url = editable_req
+
+    # If a file path is specified with extras, strip off the extras.
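`_strip_extras` above peels one trailing bracketed group off a path with a single regex, which is exactly what the next line applies to the editable URL. Its behavior is easy to see in isolation (re-using the same pattern purely for demonstration):

```python
import re

# The same pattern _strip_extras uses: a greedy prefix plus one trailing
# bracketed extras group.
EXTRAS_RE = re.compile(r"^(.+)(\[[^\]]+\])$")

for candidate in ("./proj[dev,test]", "./proj", "pkg[security]"):
    m = EXTRAS_RE.match(candidate)
    print(m.groups() if m else (candidate, None))
# ('./proj', '[dev,test]')
# ('./proj', None)
# ('pkg', '[security]')
```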
+    url_no_extras, extras = _strip_extras(url)
+
+    if os.path.isdir(url_no_extras):
+        # Treating it as code that has already been checked out
+        url_no_extras = path_to_url(url_no_extras)
+
+    if url_no_extras.lower().startswith("file:"):
+        package_name = Link(url_no_extras).egg_fragment
+        if extras:
+            return (
+                package_name,
+                url_no_extras,
+                get_requirement("placeholder" + extras.lower()).extras,
+            )
+        else:
+            return package_name, url_no_extras, set()
+
+    for version_control in vcs:
+        if url.lower().startswith(f"{version_control}:"):
+            url = f"{version_control}+{url}"
+            break
+
+    link = Link(url)
+
+    if not link.is_vcs:
+        backends = ", ".join(vcs.all_schemes)
+        raise InstallationError(
+            f"{editable_req} is not a valid editable requirement. "
+            f"It should either be a path to a local project or a VCS URL "
+            f"(beginning with {backends})."
+        )
+
+    package_name = link.egg_fragment
+    if not package_name:
+        raise InstallationError(
+            "Could not detect requirement name for '{}', please specify one "
+            "with #egg=your_package_name".format(editable_req)
+        )
+    return package_name, url, set()
+
+
+def check_first_requirement_in_file(filename: str) -> None:
+    """Check if file is parsable as a requirements file.
+
+    This is heavily based on ``pkg_resources.parse_requirements``, but
+    simplified to just check the first meaningful line.
+
+    :raises InvalidRequirement: If the first meaningful line cannot be parsed
+        as a requirement.
+    """
+    with open(filename, encoding="utf-8", errors="ignore") as f:
+        # Create a steppable iterator, so we can handle \-continuations.
+        lines = (
+            line
+            for line in (line.strip() for line in f)
+            if line and not line.startswith("#")  # Skip blank lines/comments.
+        )
+
+        for line in lines:
+            # Drop comments -- a hash without a space may be in a URL.
+            if " #" in line:
+                line = line[: line.find(" #")]
+            # If there is a line continuation, drop it, and append the next line.
+            if line.endswith("\\"):
+                line = line[:-2].strip() + next(lines, "")
+            Requirement(line)
+            return
+
+
+def deduce_helpful_msg(req: str) -> str:
+    """Returns a helpful msg in case the requirements file does not exist,
+    or cannot be parsed.
+
+    :param req: Requirements file path
+    """
+    if not os.path.exists(req):
+        return f" File '{req}' does not exist."
+    msg = " The path does exist. "
+    # Try to parse and check if it is a requirements file.
+    try:
+        check_first_requirement_in_file(req)
+    except InvalidRequirement:
+        logger.debug("Cannot parse '%s' as requirements file", req)
+    else:
+        msg += (
+            f"The argument you provided "
+            f"({req}) appears to be a"
+            f" requirements file. If that is the"
+            f" case, use the '-r' flag to install"
+            f" the packages specified within it."
+ ) + return msg + + +class RequirementParts: + def __init__( + self, + requirement: Optional[Requirement], + link: Optional[Link], + markers: Optional[Marker], + extras: Set[str], + ): + self.requirement = requirement + self.link = link + self.markers = markers + self.extras = extras + + +def parse_req_from_editable(editable_req: str) -> RequirementParts: + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req: Optional[Requirement] = Requirement(name) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{name}'") + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + +# ---- The actual constructors follow ---- + + +def install_req_from_editable( + editable_req: str, + comes_from: Optional[Union[InstallRequirement, str]] = None, + *, + use_pep517: Optional[bool] = None, + isolated: bool = False, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, + constraint: bool = False, + user_supplied: bool = False, + permit_editable_wheels: bool = False, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> InstallRequirement: + parts = parse_req_from_editable(editable_req) + + return InstallRequirement( + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, + editable=True, + permit_editable_wheels=permit_editable_wheels, + link=parts.link, + constraint=constraint, + use_pep517=use_pep517, + isolated=isolated, + global_options=global_options, + hash_options=hash_options, + config_settings=config_settings, + extras=parts.extras, + ) + + +def _looks_like_path(name: str) -> bool: + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). + """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path: str, name: str) -> Optional[str]: + """ + First, it checks whether a provided path is an installable directory. If it + is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + # TODO: The is_installable_dir test here might not be necessary + # now that it is done in load_pyproject_toml too. + raise InstallationError( + f"Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found." + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split("@", 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. 
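The '@' handling in the comment above leans on `_looks_like_path`, which is a purely syntactic test: it never touches the filesystem. A standalone re-implementation of its three rules, for illustration:

```python
import os

def looks_like_path(name: str) -> bool:
    # Mirrors the rules in the docstring above: a separator, an alternate
    # separator, or a leading dot all suggest a filesystem path.
    if os.path.sep in name:
        return True
    if os.path.altsep is not None and os.path.altsep in name:
        return True
    return name.startswith(".")

print(looks_like_path("./requirements"))  # True  (leading dot)
print(looks_like_path("pkg/subdir"))      # True  (path separator)
print(looks_like_path("requests"))        # False (plain project name)
```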
+ return None + logger.warning( + "Requirement %r looks like a filename, but the file does not exist", + name, + ) + return path_to_url(path) + + +def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts: + if is_url(name): + marker_sep = "; " + else: + marker_sep = ";" + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == "file" and re.search(r"\.\./", link.url): + link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = f"{wheel.name}=={wheel.version}" + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text: str) -> str: + if not line_source: + return text + return f"{text} (from {line_source})" + + def _parse_req_string(req_as_string: str) -> Requirement: + try: + req = get_requirement(req_as_string) + except InvalidRequirement: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif "=" in req_as_string and not any( + op in req_as_string for op in operators + ): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = "" + msg = with_source(f"Invalid requirement: {req_as_string!r}") + if add_msg: + msg += f"\nHint: {add_msg}" + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith("]"): + msg = f"Extras after version '{spec_str}'." + raise InstallationError(msg) + return req + + if req_as_string is not None: + req: Optional[Requirement] = _parse_req_string(req_as_string) + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name: str, + comes_from: Optional[Union[str, InstallRequirement]] = None, + *, + use_pep517: Optional[bool] = None, + isolated: bool = False, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, + constraint: bool = False, + line_source: Optional[str] = None, + user_supplied: bool = False, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> InstallRequirement: + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. 
+ """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, + comes_from, + link=parts.link, + markers=parts.markers, + use_pep517=use_pep517, + isolated=isolated, + global_options=global_options, + hash_options=hash_options, + config_settings=config_settings, + constraint=constraint, + extras=parts.extras, + user_supplied=user_supplied, + ) + + +def install_req_from_req_string( + req_string: str, + comes_from: Optional[InstallRequirement] = None, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, +) -> InstallRequirement: + try: + req = get_requirement(req_string) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{req_string}'") + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if ( + req.url + and comes_from + and comes_from.link + and comes_from.link.netloc in domains_not_allowed + ): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + f"{comes_from.name} depends on {req} " + ) + + return InstallRequirement( + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, + ) + + +def install_req_from_parsed_requirement( + parsed_req: ParsedRequirement, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, +) -> InstallRequirement: + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + config_settings=config_settings, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + global_options=( + parsed_req.options.get("global_options", []) + if parsed_req.options + else [] + ), + hash_options=( + parsed_req.options.get("hashes", {}) if parsed_req.options else {} + ), + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + config_settings=config_settings, + ) + return req + + +def install_req_from_link_and_ireq( + link: Link, ireq: InstallRequirement +) -> InstallRequirement: + return InstallRequirement( + req=ireq.req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + ) + + +def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: + """ + Creates a new InstallationRequirement using the given template but without + any extras. Sets the original requirement as the new one's parent + (comes_from). 
+ """ + return InstallRequirement( + req=( + _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None + ), + comes_from=ireq, + editable=ireq.editable, + link=ireq.link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + constraint=ireq.constraint, + extras=[], + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + permit_editable_wheels=ireq.permit_editable_wheels, + ) + + +def install_req_extend_extras( + ireq: InstallRequirement, + extras: Collection[str], +) -> InstallRequirement: + """ + Returns a copy of an installation requirement with some additional extras. + Makes a shallow copy of the ireq object. + """ + result = copy.copy(ireq) + result.extras = {*ireq.extras, *extras} + result.req = ( + _set_requirement_extras(ireq.req, result.extras) + if ireq.req is not None + else None + ) + return result diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/req_file.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 0000000..1ef3d5e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,554 @@ +""" +Requirements file parsing +""" + +import logging +import optparse +import os +import re +import shlex +import urllib.parse +from optparse import Values +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, +) + +from pip._internal.cli import cmdoptions +from pip._internal.exceptions import InstallationError, RequirementsFileParseError +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.encoding import auto_decode +from pip._internal.utils.urls import get_url_scheme + +if TYPE_CHECKING: + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + from pip._internal.index.package_finder import PackageFinder + +__all__ = ["parse_requirements"] + +ReqFileLines = Iterable[Tuple[int, str]] + +LineParser = Callable[[str], Tuple[str, Values]] + +SCHEME_RE = re.compile(r"^(http|https|file):", re.I) +COMMENT_RE = re.compile(r"(^|\s+)#.*$") + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. 
+ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
+
+SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
+    cmdoptions.index_url,
+    cmdoptions.extra_index_url,
+    cmdoptions.no_index,
+    cmdoptions.constraints,
+    cmdoptions.requirements,
+    cmdoptions.editable,
+    cmdoptions.find_links,
+    cmdoptions.no_binary,
+    cmdoptions.only_binary,
+    cmdoptions.prefer_binary,
+    cmdoptions.require_hashes,
+    cmdoptions.pre,
+    cmdoptions.trusted_host,
+    cmdoptions.use_new_feature,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
+    cmdoptions.global_options,
+    cmdoptions.hash,
+    cmdoptions.config_settings,
+]
+
+SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [
+    cmdoptions.config_settings,
+]
+
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [
+    str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ
+]
+
+logger = logging.getLogger(__name__)
+
+
+class ParsedRequirement:
+    def __init__(
+        self,
+        requirement: str,
+        is_editable: bool,
+        comes_from: str,
+        constraint: bool,
+        options: Optional[Dict[str, Any]] = None,
+        line_source: Optional[str] = None,
+    ) -> None:
+        self.requirement = requirement
+        self.is_editable = is_editable
+        self.comes_from = comes_from
+        self.options = options
+        self.constraint = constraint
+        self.line_source = line_source
+
+
+class ParsedLine:
+    def __init__(
+        self,
+        filename: str,
+        lineno: int,
+        args: str,
+        opts: Values,
+        constraint: bool,
+    ) -> None:
+        self.filename = filename
+        self.lineno = lineno
+        self.opts = opts
+        self.constraint = constraint
+
+        if args:
+            self.is_requirement = True
+            self.is_editable = False
+            self.requirement = args
+        elif opts.editables:
+            self.is_requirement = True
+            self.is_editable = True
+            # We don't support multiple -e on one line
+            self.requirement = opts.editables[0]
+        else:
+            self.is_requirement = False
+
+
+def parse_requirements(
+    filename: str,
+    session: PipSession,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    constraint: bool = False,
+) -> Generator[ParsedRequirement, None, None]:
+    """Parse a requirements file and yield ParsedRequirement instances.
+
+    :param filename: Path or url of requirements file.
+    :param session: PipSession instance.
+    :param finder: Instance of pip.index.PackageFinder.
+    :param options: cli options.
+    :param constraint: If true, parsing a constraint file rather than
+        requirements file.
+ """ + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, options=options, finder=finder, session=session + ) + if parsed_req is not None: + yield parsed_req + + +def preprocess(content: str) -> ReqFileLines: + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + """ + lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def handle_requirement_line( + line: ParsedLine, + options: Optional[optparse.Values] = None, +) -> ParsedRequirement: + # preserve for the nested code path + line_comes_from = "{} {} (line {})".format( + "-c" if line.constraint else "-r", + line.filename, + line.lineno, + ) + + assert line.is_requirement + + # get the options that apply to requirements + if line.is_editable: + supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST + else: + supported_dest = SUPPORTED_OPTIONS_REQ_DEST + req_options = {} + for dest in supported_dest: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) + + +def handle_option_line( + opts: Values, + filename: str, + lineno: int, + finder: Optional["PackageFinder"] = None, + options: Optional[optparse.Values] = None, + session: Optional[PipSession] = None, +) -> None: + if opts.hashes: + logger.warning( + "%s line %s has --hash but no requirement, and will be ignored.", + filename, + lineno, + ) + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled if f not in options.features_enabled + ) + + # set finder options + if finder: + find_links = finder.find_links + index_urls = finder.index_urls + no_index = finder.search_scope.no_index + if opts.no_index is True: + no_index = True + index_urls = [] + if opts.index_url and not no_index: + index_urls = [opts.index_url] + if opts.extra_index_urls and not no_index: + index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. 
+            value = opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            find_links.append(value)
+
+        if session:
+            # We need to update the auth urls in session
+            session.update_index_urls(index_urls)
+
+        search_scope = SearchScope(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+        finder.search_scope = search_scope
+
+        if opts.pre:
+            finder.set_allow_all_prereleases()
+
+        if opts.prefer_binary:
+            finder.set_prefer_binary()
+
+    if session:
+        for host in opts.trusted_hosts or []:
+            source = f"line {lineno} of {filename}"
+            session.add_trusted_host(host, source=source)
+
+
+def handle_line(
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+    finder: Optional["PackageFinder"] = None,
+    session: Optional[PipSession] = None,
+) -> Optional[ParsedRequirement]:
+    """Handle a single parsed requirements line; this can result in
+    creating/yielding requirements, or updating the finder.
+
+    :param line: The parsed line to be processed.
+    :param options: CLI options.
+    :param finder: The finder - updated by non-requirement lines.
+    :param session: The session - updated by non-requirement lines.
+
+    Returns a ParsedRequirement object if the line is a requirement line,
+    otherwise returns None.
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+    affect the finder.
+ """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser: + def __init__( + self, + session: PipSession, + line_parser: LineParser, + ) -> None: + self._session = session + self._line_parser = line_parser + + def parse( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: + """Parse a given file, yielding parsed lines.""" + yield from self._parse_and_recurse(filename, constraint) + + def _parse_and_recurse( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: + for line in self._parse_file(filename, constraint): + if not line.is_requirement and ( + line.opts.requirements or line.opts.constraints + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib.parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), + req_path, + ) + + yield from self._parse_and_recurse(req_path, nested_constraint) + else: + yield line + + def _parse_file( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: + _, content = get_file_content(filename, self._session) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = f"Invalid requirement: {line}\n{e.msg}" + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser: + def parse_line(line: str) -> Tuple[str, Values]: + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + + try: + options = shlex.split(options_str) + except ValueError as e: + raise OptionParsingError(f"Could not split options: {options_str}") from e + + opts, _ = parser.parse_args(options, defaults) + + return args_str, opts + + return parse_line + + +def break_args_options(line: str) -> Tuple[str, str]: + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. 
+ """ + tokens = line.split(" ") + args = [] + options = tokens[:] + for token in tokens: + if token.startswith("-") or token.startswith("--"): + break + else: + args.append(token) + options.pop(0) + return " ".join(args), " ".join(options) + + +class OptionParsingError(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + + +def build_parser() -> optparse.OptionParser: + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. + def parser_exit(self: Any, msg: str) -> "NoReturn": + raise OptionParsingError(msg) + + # NOTE: mypy disallows assigning to a method + # https://github.com/python/mypy/issues/2427 + parser.exit = parser_exit # type: ignore + + return parser + + +def join_lines(lines_enum: ReqFileLines) -> ReqFileLines: + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line: List[str] = [] + for line_number, line in lines_enum: + if not line.endswith("\\") or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = " " + line + if new_line: + new_line.append(line) + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip("\\")) + + # last line contains \ + if new_line: + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines: + """ + Strips comments and filter empty lines. + """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub("", line) + line = line.strip() + if line: + yield line_number, line + + +def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines: + """Replace all environment variables that can be retrieved via `os.getenv`. + + The only allowed format for environment variables defined in the + requirement file is `${MY_VARIABLE_1}` to ensure two things: + + 1. Strings that contain a `$` aren't accidentally (partially) expanded. + 2. Ensure consistency across platforms for requirement files. + + These points are the result of a discussion on the `github pull + request #3514 `_. + + Valid characters in variable names follow the `POSIX standard + `_ and are limited + to uppercase letter, digits and the `_` (underscore). + """ + for line_number, line in lines_enum: + for env_var, var_name in ENV_VAR_RE.findall(line): + value = os.getenv(var_name) + if not value: + continue + + line = line.replace(env_var, value) + + yield line_number, line + + +def get_file_content(url: str, session: PipSession) -> Tuple[str, str]: + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. + """ + scheme = get_url_scheme(url) + + # Pip has special support for file:// URLs (LocalFSAdapter). 
+ if scheme in ["http", "https", "file"]: + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + # Assume this is a bare path. + try: + with open(url, "rb") as f: + content = auto_decode(f.read()) + except OSError as exc: + raise InstallationError(f"Could not open requirements file: {exc}") + return url, content diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/req_install.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 0000000..a65611c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,923 @@ +import functools +import logging +import os +import shutil +import sys +import uuid +import zipfile +from optparse import Values +from pathlib import Path +from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError, PreviousBuildDirError +from pip._internal.locations import get_scheme +from pip._internal.metadata import ( + BaseDistribution, + get_default_environment, + get_directory_distribution, + get_wheel_distribution, +) +from pip._internal.metadata.base import FilesystemWheel +from pip._internal.models.direct_url import DirectUrl +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_editable import generate_editable_metadata +from pip._internal.operations.build.metadata_legacy import ( + generate_metadata as generate_metadata_legacy, +) +from pip._internal.operations.install.editable_legacy import ( + install_editable as install_editable_legacy, +) +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + ConfiguredBuildBackendHookCaller, + ask_path_exists, + backup_dir, + display_path, + hide_url, + is_installable_dir, + redact_auth_from_requirement, + redact_auth_from_url, +) +from pip._internal.utils.packaging import safe_extra +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.unpacking import unpack_file +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. 
+ """ + + def __init__( + self, + req: Optional[Requirement], + comes_from: Optional[Union[str, "InstallRequirement"]], + editable: bool = False, + link: Optional[Link] = None, + markers: Optional[Marker] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + *, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + constraint: bool = False, + extras: Collection[str] = (), + user_supplied: bool = False, + permit_editable_wheels: bool = False, + ) -> None: + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.editable = editable + self.permit_editable_wheels = permit_editable_wheels + + # source_dir is the local directory where the linked requirement is + # located, or unpacked. In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir: Optional[str] = None + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) + + # original_link is the direct URL that was provided by the user for the + # requirement, either directly or via a constraints file. + if link is None and req and req.url: + # PEP 508 URL requirement + link = Link(req.url) + self.link = self.original_link = link + + # When this InstallRequirement is a wheel obtained from the cache of locally + # built wheels, this is the source link corresponding to the cache entry, which + # was used to download and build the cached wheel. + self.cached_wheel_source_link: Optional[Link] = None + + # Information about the location of the artifact that was downloaded . This + # property is guaranteed to be set in resolver results. + self.download_info: Optional[DirectUrl] = None + + # Path to any downloaded or already-existing package. + self.local_file_path: Optional[str] = None + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path + + if extras: + self.extras = extras + elif req: + self.extras = req.extras + else: + self.extras = set() + if markers is None and req: + markers = req.marker + self.markers = markers + + # This holds the Distribution object if this requirement is already installed. + self.satisfied_by: Optional[BaseDistribution] = None + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False + # Temporary build location + self._temp_build_dir: Optional[TempDirectory] = None + # Set to True after successful installation + self.install_succeeded: Optional[bool] = None + # Supplied options + self.global_options = global_options if global_options else [] + self.hash_options = hash_options if hash_options else {} + self.config_settings = config_settings + # Set to True after successful preparation of this requirement + self.prepared = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. dependencies, extras or constraints. 
+ self.user_supplied = user_supplied + + self.isolated = isolated + self.build_env: BuildEnvironment = NoOpBuildEnvironment() + + # For PEP 517, the directory where we request the project metadata + # gets stored. We need this to pass to build_wheel, so the backend + # can ensure that the wheel matches the metadata (see the PEP for + # details). + self.metadata_directory: Optional[str] = None + + # The static build requirements (from pyproject.toml) + self.pyproject_requires: Optional[List[str]] = None + + # Build requirements that we will check are available + self.requirements_to_check: List[str] = [] + + # The PEP 517 backend we should use to build the project + self.pep517_backend: Optional[BuildBackendHookCaller] = None + + # Are we using PEP 517 for this requirement? + # After pyproject.toml has been loaded, the only valid values are True + # and False. Before loading, None is valid (meaning "use the default"). + # Setting an explicit value before loading pyproject.toml is supported, + # but after loading this flag should be treated as read only. + self.use_pep517 = use_pep517 + + # If config settings are provided, enforce PEP 517. + if self.config_settings: + if self.use_pep517 is False: + logger.warning( + "--no-use-pep517 ignored for %s " + "because --config-settings are specified.", + self, + ) + self.use_pep517 = True + + # This requirement needs more preparation before it can be built + self.needs_more_preparation = False + + # This requirement needs to be unpacked before it can be installed. + self._archive_source: Optional[Path] = None + + def __str__(self) -> str: + if self.req: + s = redact_auth_from_requirement(self.req) + if self.link: + s += f" from {redact_auth_from_url(self.link.url)}" + elif self.link: + s = redact_auth_from_url(self.link.url) + else: + s = "" + if self.satisfied_by is not None: + if self.satisfied_by.location is not None: + location = display_path(self.satisfied_by.location) + else: + location = "" + s += f" in {location}" + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from: Optional[str] = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += f" (from {comes_from})" + return s + + def __repr__(self) -> str: + return "<{} object: {} editable={!r}>".format( + self.__class__.__name__, str(self), self.editable + ) + + def format_debug(self) -> str: + """An un-tested helper for getting state, for debugging.""" + attributes = vars(self) + names = sorted(attributes) + + state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names)) + return "<{name} object: {{{state}}}>".format( + name=self.__class__.__name__, + state=", ".join(state), + ) + + # Things that are valid for all kinds of requirements? 
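`is_pinned`, defined a few lines below, counts a requirement as pinned only when it carries exactly one specifier whose operator is `==` or `===`. The same test, stated against the standalone `packaging` distribution for illustration:

```python
from packaging.requirements import Requirement

def is_pinned(req: Requirement) -> bool:
    specifiers = req.specifier
    return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

print(is_pinned(Requirement("some-package==1.2")))       # True
print(is_pinned(Requirement("some-package>1.2")))        # False
print(is_pinned(Requirement("some-package>=1.2,<2.0")))  # False
```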
+ @property + def name(self) -> Optional[str]: + if self.req is None: + return None + return self.req.name + + @functools.lru_cache() # use cached_property in python 3.8+ + def supports_pyproject_editable(self) -> bool: + if not self.use_pep517: + return False + assert self.pep517_backend + with self.build_env: + runner = runner_with_spinner_message( + "Checking if build backend supports build_editable" + ) + with self.pep517_backend.subprocess_runner(runner): + return "build_editable" in self.pep517_backend._supported_features() + + @property + def specifier(self) -> SpecifierSet: + assert self.req is not None + return self.req.specifier + + @property + def is_direct(self) -> bool: + """Whether this requirement was specified as a direct URL.""" + return self.original_link is not None + + @property + def is_pinned(self) -> bool: + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + assert self.req is not None + specifiers = self.req.specifier + return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} + + def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ("",) + if self.markers is not None: + return any( + self.markers.evaluate({"extra": extra}) + # TODO: Remove these two variants when packaging is upgraded to + # support the marker comparison logic specified in PEP 685. + or self.markers.evaluate({"extra": safe_extra(extra)}) + or self.markers.evaluate({"extra": canonicalize_name(extra)}) + for extra in extras_requested + ) + else: + return True + + @property + def has_hash_options(self) -> bool: + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.hash_options) + + def hashes(self, trust_internet: bool = True) -> Hashes: + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) 
hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.hash_options.copy() + if trust_internet: + link = self.link + elif self.is_direct and self.user_supplied: + link = self.original_link + else: + link = None + if link and link.hash: + assert link.hash_name is not None + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self) -> Optional[str]: + """Format a nice indicator to show where this "comes from" """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + comes_from: Optional[str] + if isinstance(self.comes_from, str): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += "->" + comes_from + return s + + def ensure_build_location( + self, build_dir: str, autodelete: bool, parallel_builds: bool + ) -> str: + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) + + return self._temp_build_dir.path + + # This is the only remaining place where we manually determine the path + # for the temporary directory. It is only needed for editables where + # it is the value of the --src option. + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name: str = canonicalize_name(self.req.name) + if parallel_builds: + dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug("Creating directory %s", build_dir) + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self) -> None: + """Set requirement after generating metadata.""" + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join( + [ + self.metadata["Name"], + op, + self.metadata["Version"], + ] + ) + ) + + def warn_on_mismatching_name(self) -> None: + assert self.req is not None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + "Generating metadata for package %s " + "produced metadata for project name %s. Fix your " + "#egg=%s fragments.", + self.name, + metadata_name, + self.name, + ) + self.req = Requirement(metadata_name) + + def check_if_exists(self, use_user_site: bool) -> None: + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. 
+ """ + if self.req is None: + return + existing_dist = get_default_environment().get_distribution(self.req.name) + if not existing_dist: + return + + version_compatible = self.req.specifier.contains( + existing_dist.version, + prereleases=True, + ) + if not version_compatible: + self.satisfied_by = None + if use_user_site: + if existing_dist.in_usersite: + self.should_reinstall = True + elif running_under_virtualenv() and existing_dist.in_site_packages: + raise InstallationError( + f"Will not install to the user site because it will " + f"lack sys.path precedence to {existing_dist.raw_name} " + f"in {existing_dist.location}" + ) + else: + self.should_reinstall = True + else: + if self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + else: + self.satisfied_by = existing_dist + + # Things valid for wheels + @property + def is_wheel(self) -> bool: + if not self.link: + return False + return self.link.is_wheel + + @property + def is_wheel_from_cache(self) -> bool: + # When True, it means that this InstallRequirement is a local wheel file in the + # cache of locally built wheels. + return self.cached_wheel_source_link is not None + + # Things valid for sdists + @property + def unpacked_source_directory(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return os.path.join( + self.source_dir, self.link and self.link.subdirectory_fragment or "" + ) + + @property + def setup_py_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, "setup.py") + + return setup_py + + @property + def setup_cfg_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") + + return setup_cfg + + @property + def pyproject_toml_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self) -> None: + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. In particular, the + use_pep517 attribute can be used to determine whether we should + follow the PEP 517 or legacy (setup.py) code path. + """ + pyproject_toml_data = load_pyproject_toml( + self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) + ) + + if pyproject_toml_data is None: + assert not self.config_settings + self.use_pep517 = False + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = ConfiguredBuildBackendHookCaller( + self, + self.unpacked_source_directory, + backend, + backend_path=backend_path, + ) + + def isolated_editable_sanity_check(self) -> None: + """Check that an editable requirement if valid for use with PEP 517/518. + + This verifies that an editable that has a pyproject.toml either supports PEP 660 + or as a setup.py or a setup.cfg + """ + if ( + self.editable + and self.use_pep517 + and not self.supports_pyproject_editable() + and not os.path.isfile(self.setup_py_path) + and not os.path.isfile(self.setup_cfg_path) + ): + raise InstallationError( + f"Project {self} has a 'pyproject.toml' and its build " + f"backend is missing the 'build_editable' hook. 
Since it does not " + f"have a 'setup.py' nor a 'setup.cfg', " + f"it cannot be installed in editable mode. " + f"Consider using a build backend that supports PEP 660." + ) + + def prepare_metadata(self) -> None: + """Ensure that project metadata is available. + + Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. + """ + assert self.source_dir, f"No source dir for {self}" + details = self.name or f"from {self.link}" + + if self.use_pep517: + assert self.pep517_backend is not None + if ( + self.editable + and self.permit_editable_wheels + and self.supports_pyproject_editable() + ): + self.metadata_directory = generate_editable_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + else: + self.metadata_directory = generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + else: + self.metadata_directory = generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=details, + ) + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self) -> Any: + if not hasattr(self, "_metadata"): + self._metadata = self.get_dist().metadata + + return self._metadata + + def get_dist(self) -> BaseDistribution: + if self.metadata_directory: + return get_directory_distribution(self.metadata_directory) + elif self.local_file_path and self.is_wheel: + assert self.req is not None + return get_wheel_distribution( + FilesystemWheel(self.local_file_path), + canonicalize_name(self.req.name), + ) + raise AssertionError( + f"InstallRequirement {self} has no metadata directory and no wheel: " + f"can't make a distribution." + ) + + def assert_source_matches_version(self) -> None: + assert self.source_dir, f"No source dir for {self}" + version = self.metadata["version"] + if self.req and self.req.specifier and version not in self.req.specifier: + logger.warning( + "Requested %s, but installing version %s", + self, + version, + ) + else: + logger.debug( + "Source in %s has version %s, which satisfies requirement %s", + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir( + self, + parent_dir: str, + autodelete: bool = False, + parallel_builds: bool = False, + ) -> None: + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) + + def needs_unpacked_archive(self, archive_source: Path) -> None: + assert self._archive_source is None + self._archive_source = archive_source + + def ensure_pristine_source_checkout(self) -> None: + """Ensure the source directory has not yet been built in.""" + assert self.source_dir is not None + if self._archive_source is not None: + unpack_file(str(self._archive_source), self.source_dir) + elif is_installable_dir(self.source_dir): + # If a checkout exists, it's unwise to keep going. 
+            # version inconsistencies are logged later, but do not fail
+            # the installation.
+            raise PreviousBuildDirError(
+                f"pip can't proceed with requirement '{self}' due to a "
+                f"pre-existing build directory ({self.source_dir}). This is likely "
+                "due to a previous installation that failed. pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again."
+            )
+
+    # For editable installations
+    def update_editable(self) -> None:
+        if not self.link:
+            logger.debug(
+                "Cannot update repository at %s; repository location is unknown",
+                self.source_dir,
+            )
+            return
+        assert self.editable
+        assert self.source_dir
+        if self.link.scheme == "file":
+            # Static paths don't get updated
+            return
+        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
+        # Editable requirements are validated in Requirement constructors.
+        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
+        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
+        hidden_url = hide_url(self.link.url)
+        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
+
+    # Top-level Actions
+    def uninstall(
+        self, auto_confirm: bool = False, verbose: bool = False
+    ) -> Optional[UninstallPathSet]:
+        """
+        Uninstall the distribution currently satisfying this requirement.
+
+        Prompts before removing or modifying files unless
+        ``auto_confirm`` is True.
+
+        Refuses to delete or modify files outside of ``sys.prefix`` -
+        thus uninstallation within a virtual environment can only
+        modify that virtual environment, even if the virtualenv is
+        linked to global site-packages.
+
+        """
+        assert self.req
+        dist = get_default_environment().get_distribution(self.req.name)
+        if not dist:
+            logger.warning("Skipping %s as it is not installed.", self.name)
+            return None
+        logger.info("Found existing installation: %s", dist)
+
+        uninstalled_pathset = UninstallPathSet.from_dist(dist)
+        uninstalled_pathset.remove(auto_confirm, verbose)
+        return uninstalled_pathset
+
+    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
+        def _clean_zip_name(name: str, prefix: str) -> str:
+            assert name.startswith(
+                prefix + os.path.sep
+            ), f"name {name!r} doesn't start with prefix {prefix!r}"
+            name = name[len(prefix) + 1 :]
+            name = name.replace(os.path.sep, "/")
+            return name
+
+        assert self.req is not None
+        path = os.path.join(parentdir, path)
+        name = _clean_zip_name(path, rootdir)
+        return self.req.name + "/" + name
+
+    def archive(self, build_dir: Optional[str]) -> None:
+        """Saves archive to provided build_dir.
+
+        Used for saving downloaded VCS requirements as part of `pip download`.
+        """
+        assert self.source_dir
+        if build_dir is None:
+            return
+
+        create_archive = True
+        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
+        archive_path = os.path.join(build_dir, archive_name)
+
+        if os.path.exists(archive_path):
+            response = ask_path_exists(
+                f"The file {display_path(archive_path)} exists. 
(i)gnore, (w)ipe, " + "(b)ackup, (a)bort ", + ("i", "w", "b", "a"), + ) + if response == "i": + create_archive = False + elif response == "w": + logger.warning("Deleting %s", display_path(archive_path)) + os.remove(archive_path) + elif response == "b": + dest_file = backup_dir(archive_path) + logger.warning( + "Backing up %s to %s", + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == "a": + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, + "w", + zipfile.ZIP_DEFLATED, + allowZip64=True, + ) + with zip_output: + dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) + for dirpath, dirnames, filenames in os.walk(dir): + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, + parentdir=dirpath, + rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + "/") + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, "") + for filename in filenames: + file_arcname = self._get_archive_name( + filename, + parentdir=dirpath, + rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info("Saved %s", display_path(archive_path)) + + def install( + self, + global_options: Optional[Sequence[str]] = None, + root: Optional[str] = None, + home: Optional[str] = None, + prefix: Optional[str] = None, + warn_script_location: bool = True, + use_user_site: bool = False, + pycompile: bool = True, + ) -> None: + assert self.req is not None + scheme = get_scheme( + self.req.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + if self.editable and not self.is_wheel: + if self.config_settings: + logger.warning( + "--config-settings ignored for legacy editable install of %s. " + "Consider upgrading to a version of setuptools " + "that supports PEP 660 (>= 64).", + self, + ) + install_editable_legacy( + global_options=global_options if global_options is not None else [], + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.req.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + ) + self.install_succeeded = True + return + + assert self.is_wheel + assert self.local_file_path + + install_wheel( + self.req.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=self.download_info if self.is_direct else None, + requested=self.user_supplied, + ) + self.install_succeeded = True + + +def check_invalid_constraint_type(req: InstallRequirement) -> str: + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.editable: + problem = "Editable requirements are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" + + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." 
+ ), + replacement="replacing the constraint with a requirement", + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210, + ) + + return problem + + +def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool: + if getattr(options, option, None): + return True + for req in reqs: + if getattr(req, option, None): + return True + return False + + +def check_legacy_setup_py_options( + options: Values, + reqs: List[InstallRequirement], +) -> None: + has_build_options = _has_option(options, reqs, "build_options") + has_global_options = _has_option(options, reqs, "global_options") + if has_build_options or has_global_options: + deprecated( + reason="--build-option and --global-option are deprecated.", + issue=11859, + replacement="to use --config-settings", + gone_in="24.2", + ) + logger.warning( + "Implying --no-binary=:all: due to the presence of " + "--build-option / --global-option. " + ) + options.format_control.disallow_binaries() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/req_set.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 0000000..bf36114 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,119 @@ +import logging +from collections import OrderedDict +from typing import Dict, List + +from pip._vendor.packaging.specifiers import LegacySpecifier +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import LegacyVersion + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated + +logger = logging.getLogger(__name__) + + +class RequirementSet: + def __init__(self, check_supported_wheels: bool = True) -> None: + """Create a RequirementSet.""" + + self.requirements: Dict[str, InstallRequirement] = OrderedDict() + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements: List[InstallRequirement] = [] + + def __str__(self) -> str: + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name or ""), + ) + return " ".join(str(req.req) for req in requirements) + + def __repr__(self) -> str: + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name or ""), + ) + + format_string = "<{classname} object; {count} requirement(s): {reqs}>" + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=", ".join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req: InstallRequirement) -> None: + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def has_requirement(self, name: str) -> bool: + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements + and not self.requirements[project_name].constraint + ) + + def get_requirement(self, name: str) -> InstallRequirement: + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError(f"No project with the name {name!r}") + + @property + def all_requirements(self) -> List[InstallRequirement]: + return self.unnamed_requirements + 
list(self.requirements.values()) + + @property + def requirements_to_install(self) -> List[InstallRequirement]: + """Return the list of requirements that need to be installed. + + TODO remove this property together with the legacy resolver, since the new + resolver only returns requirements that need to be installed. + """ + return [ + install_req + for install_req in self.all_requirements + if not install_req.constraint and not install_req.satisfied_by + ] + + def warn_legacy_versions_and_specifiers(self) -> None: + for req in self.requirements_to_install: + version = req.get_dist().version + if isinstance(version, LegacyVersion): + deprecated( + reason=( + f"pip has selected the non standard version {version} " + f"of {req}. In the future this version will be " + f"ignored as it isn't standard compliant." + ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="24.1", + ) + for dep in req.get_dist().iter_dependencies(): + if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): + deprecated( + reason=( + f"pip has selected {req} {version} which has non " + f"standard dependency specifier {dep}. " + f"In the future this version of {req} will be " + f"ignored as it isn't standard compliant." + ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="24.1", + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000..707fde1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,649 @@ +import functools +import os +import sys +import sysconfig +from importlib.util import cache_from_source +from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple + +from pip._internal.exceptions import UninstallationError +from pip._internal.locations import get_bin_prefix, get_bin_user +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.logging import getLogger, indent_log +from pip._internal.utils.misc import ask, normalize_path, renames, rmtree +from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.virtualenv import running_under_virtualenv + +logger = getLogger(__name__) + + +def _script_names( + bin_dir: str, script_name: str, is_gui: bool +) -> Generator[str, None, None]: + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. 
+ Returns the list of file names + """ + exe_name = os.path.join(bin_dir, script_name) + yield exe_name + if not WINDOWS: + return + yield f"{exe_name}.exe" + yield f"{exe_name}.exe.manifest" + if is_gui: + yield f"{exe_name}-script.pyw" + else: + yield f"{exe_name}-script.py" + + +def _unique( + fn: Callable[..., Generator[Any, None, None]] +) -> Callable[..., Generator[Any, None, None]]: + @functools.wraps(fn) + def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]: + seen: Set[Any] = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + + return unique + + +@_unique +def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]: + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. + + If RECORD is not found, raises UninstallationError, + with possible information from the INSTALLER file. + + https://packaging.python.org/specifications/recording-installed-packages/ + """ + location = dist.location + assert location is not None, "not installed" + + entries = dist.iter_declared_entries() + if entries is None: + msg = f"Cannot uninstall {dist}, RECORD file not found." + installer = dist.installer + if not installer or installer == "pip": + dep = f"{dist.raw_name}=={dist.version}" + msg += ( + " You might be able to recover from this via: " + f"'pip install --force-reinstall --no-deps {dep}'." + ) + else: + msg += f" Hint: The package was installed by {installer}." + raise UninstallationError(msg) + + for entry in entries: + path = os.path.join(location, entry) + yield path + if path.endswith(".py"): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + ".pyc") + yield path + path = os.path.join(dn, base + ".pyo") + yield path + + +def compact(paths: Iterable[str]) -> Set[str]: + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. If /a/path/ and + /a/path/to/a/file.txt are both in the set, leave only the + shorter path.""" + + sep = os.path.sep + short_paths: Set[str] = set() + for path in sorted(paths, key=len): + should_skip = any( + path.startswith(shortpath.rstrip("*")) + and path[len(shortpath.rstrip("*").rstrip(sep))] == sep + for shortpath in short_paths + ) + if not should_skip: + short_paths.add(path) + return short_paths + + +def compress_for_rename(paths: Iterable[str]) -> Set[str]: + """Returns a set containing the paths that need to be renamed. + + This set may include directories when the original sequence of paths + included every file on disk. + """ + case_map = {os.path.normcase(p): p for p in paths} + remaining = set(case_map) + unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len) + wildcards: Set[str] = set() + + def norm_join(*a: str) -> str: + return os.path.normcase(os.path.join(*a)) + + for root in unchecked: + if any(os.path.normcase(root).startswith(w) for w in wildcards): + # This directory has already been handled. 
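+            # (e.g. because a shorter root handled earlier was already
+            # collapsed into a wildcard entry that covers this one)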
+            continue
+
+        all_files: Set[str] = set()
+        all_subdirs: Set[str] = set()
+        for dirname, subdirs, files in os.walk(root):
+            all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+            all_files.update(norm_join(root, dirname, f) for f in files)
+        # If all the files we found are in our remaining set of files to
+        # remove, then remove them from the latter set and add a wildcard
+        # for the directory.
+        if not (all_files - remaining):
+            remaining.difference_update(all_files)
+            wildcards.add(root + os.sep)
+
+    return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
+    """Returns a tuple of 2 sets of which paths to display to user
+
+    The first set contains paths that would be deleted. Files of a package
+    are not added and the top-level directory of the package has a '*' added
+    at the end - to signify that all its contents are removed.
+
+    The second set contains files that would have been skipped in the above
+    folders.
+    """
+
+    will_remove = set(paths)
+    will_skip = set()
+
+    # Determine folders and files
+    folders = set()
+    files = set()
+    for path in will_remove:
+        if path.endswith(".pyc"):
+            continue
+        if path.endswith("__init__.py") or ".dist-info" in path:
+            folders.add(os.path.dirname(path))
+        files.add(path)
+
+    _normcased_files = set(map(os.path.normcase, files))
+
+    folders = compact(folders)
+
+    # This walks the tree using os.walk to not miss extra folders
+    # that might get added.
+    for folder in folders:
+        for dirpath, _, dirfiles in os.walk(folder):
+            for fname in dirfiles:
+                if fname.endswith(".pyc"):
+                    continue
+
+                file_ = os.path.join(dirpath, fname)
+                if (
+                    os.path.isfile(file_)
+                    and os.path.normcase(file_) not in _normcased_files
+                ):
+                    # We are skipping this file. Add it to the set.
+                    will_skip.add(file_)
+
+    will_remove = files | {os.path.join(folder, "*") for folder in folders}
+
+    return will_remove, will_skip
+
+
+class StashedUninstallPathSet:
+    """A set of file rename operations to stash files while
+    tentatively uninstalling them."""
+
+    def __init__(self) -> None:
+        # Mapping from source file root to [Adjacent]TempDirectory
+        # for files under that directory.
+        self._save_dirs: Dict[str, TempDirectory] = {}
+        # (old path, new path) tuples for each move that may need
+        # to be undone.
+        self._moves: List[Tuple[str, str]] = []
+
+    def _get_directory_stash(self, path: str) -> str:
+        """Stashes a directory.
+
+        Directories are stashed adjacent to their original location if
+        possible, or else moved/copied into the user's temp dir."""
+
+        try:
+            save_dir: TempDirectory = AdjacentTempDirectory(path)
+        except OSError:
+            save_dir = TempDirectory(kind="uninstall")
+        self._save_dirs[os.path.normcase(path)] = save_dir
+
+        return save_dir.path
+
+    def _get_file_stash(self, path: str) -> str:
+        """Stashes a file.
+ + If no root has been provided, one will be created for the directory + in the user's temp directory.""" + path = os.path.normcase(path) + head, old_head = os.path.dirname(path), None + save_dir = None + + while head != old_head: + try: + save_dir = self._save_dirs[head] + break + except KeyError: + pass + head, old_head = os.path.dirname(head), head + else: + # Did not find any suitable root + head = os.path.dirname(path) + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[head] = save_dir + + relpath = os.path.relpath(path, head) + if relpath and relpath != os.path.curdir: + return os.path.join(save_dir.path, relpath) + return save_dir.path + + def stash(self, path: str) -> str: + """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. + """ + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: + new_path = self._get_directory_stash(path) + else: + new_path = self._get_file_stash(path) + + self._moves.append((path, new_path)) + if path_is_dir and os.path.isdir(new_path): + # If we're moving a directory, we need to + # remove the destination first or else it will be + # moved to inside the existing directory. + # We just created new_path ourselves, so it will + # be removable. + os.rmdir(new_path) + renames(path, new_path) + return new_path + + def commit(self) -> None: + """Commits the uninstall by removing stashed files.""" + for save_dir in self._save_dirs.values(): + save_dir.cleanup() + self._moves = [] + self._save_dirs = {} + + def rollback(self) -> None: + """Undoes the uninstall by moving stashed files back.""" + for p in self._moves: + logger.info("Moving to %s\n from %s", *p) + + for new_path, path in self._moves: + try: + logger.debug("Replacing %s from %s", new_path, path) + if os.path.isfile(new_path) or os.path.islink(new_path): + os.unlink(new_path) + elif os.path.isdir(new_path): + rmtree(new_path) + renames(path, new_path) + except OSError as ex: + logger.error("Failed to restore %s", new_path) + logger.debug("Exception: %s", ex) + + self.commit() + + @property + def can_rollback(self) -> bool: + return bool(self._moves) + + +class UninstallPathSet: + """A set of file paths to be removed in the uninstallation of a + requirement.""" + + def __init__(self, dist: BaseDistribution) -> None: + self._paths: Set[str] = set() + self._refuse: Set[str] = set() + self._pth: Dict[str, UninstallPthEntries] = {} + self._dist = dist + self._moved_paths = StashedUninstallPathSet() + # Create local cache of normalize_path results. Creating an UninstallPathSet + # can result in hundreds/thousands of redundant calls to normalize_path with + # the same args, which hurts performance. + self._normalize_path_cached = functools.lru_cache()(normalize_path) + + def _permitted(self, path: str) -> bool: + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. 
+ + """ + # aka is_local, but caching normalized sys.prefix + if not running_under_virtualenv(): + return True + return path.startswith(self._normalize_path_cached(sys.prefix)) + + def add(self, path: str) -> None: + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self._paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == ".py": + self.add(cache_from_source(path)) + + def add_pth(self, pth_file: str, entry: str) -> None: + pth_file = self._normalize_path_cached(pth_file) + if self._permitted(pth_file): + if pth_file not in self._pth: + self._pth[pth_file] = UninstallPthEntries(pth_file) + self._pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: + """Remove paths in ``self._paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self._paths: + logger.info( + "Can't uninstall '%s'. No files were found to uninstall.", + self._dist.raw_name, + ) + return + + dist_name_version = f"{self._dist.raw_name}-{self._dist.version}" + logger.info("Uninstalling %s:", dist_name_version) + + with indent_log(): + if auto_confirm or self._allowed_to_proceed(verbose): + moved = self._moved_paths + + for_rename = compress_for_rename(self._paths) + + for path in sorted(compact(for_rename)): + moved.stash(path) + logger.verbose("Removing file or directory %s", path) + + for pth in self._pth.values(): + pth.remove() + + logger.info("Successfully uninstalled %s", dist_name_version) + + def _allowed_to_proceed(self, verbose: bool) -> bool: + """Display which files would be deleted and prompt for confirmation""" + + def _display(msg: str, paths: Iterable[str]) -> None: + if not paths: + return + + logger.info(msg) + with indent_log(): + for path in sorted(compact(paths)): + logger.info(path) + + if not verbose: + will_remove, will_skip = compress_for_output_listing(self._paths) + else: + # In verbose mode, display all the files that are going to be + # deleted. + will_remove = set(self._paths) + will_skip = set() + + _display("Would remove:", will_remove) + _display("Would not remove (might be manually added):", will_skip) + _display("Would not remove (outside of prefix):", self._refuse) + if verbose: + _display("Will actually move:", compress_for_rename(self._paths)) + + return ask("Proceed (Y/n)? 
", ("y", "n", "")) != "n" + + def rollback(self) -> None: + """Rollback the changes previously made by remove().""" + if not self._moved_paths.can_rollback: + logger.error( + "Can't roll back %s; was not uninstalled", + self._dist.raw_name, + ) + return + logger.info("Rolling back uninstall of %s", self._dist.raw_name) + self._moved_paths.rollback() + for pth in self._pth.values(): + pth.rollback() + + def commit(self) -> None: + """Remove temporary save dir: rollback will no longer be possible.""" + self._moved_paths.commit() + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": + dist_location = dist.location + info_location = dist.info_location + if dist_location is None: + logger.info( + "Not uninstalling %s since it is not installed", + dist.canonical_name, + ) + return cls(dist) + + normalized_dist_location = normalize_path(dist_location) + if not dist.local: + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.canonical_name, + normalized_dist_location, + sys.prefix, + ) + return cls(dist) + + if normalized_dist_location in { + p + for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")} + if p + }: + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.canonical_name, + normalized_dist_location, + ) + return cls(dist) + + paths_to_remove = cls(dist) + develop_egg_link = egg_link_path_from_location(dist.raw_name) + + # Distribution is installed with metadata in a "flat" .egg-info + # directory. This means it is not a modern .dist-info installation, an + # egg, or legacy editable. + setuptools_flat_installation = ( + dist.installed_with_setuptools_egg_info + and info_location is not None + and os.path.exists(info_location) + # If dist is editable and the location points to a ``.egg-info``, + # we are in fact in the legacy editable case. + and not info_location.endswith(f"{dist.setuptools_filename}.egg-info") + ) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if setuptools_flat_installation: + if info_location is not None: + paths_to_remove.add(info_location) + installed_files = dist.iter_declared_entries() + if installed_files is not None: + for installed_file in installed_files: + paths_to_remove.add(os.path.join(dist_location, installed_file)) + # FIXME: need a test for this elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.is_file("top_level.txt"): + try: + namespace_packages = dist.read_text("namespace_packages.txt") + except FileNotFoundError: + namespaces = [] + else: + namespaces = namespace_packages.splitlines(keepends=False) + for top_level_pkg in [ + p + for p in dist.read_text("top_level.txt").splitlines() + if p and p not in namespaces + ]: + path = os.path.join(dist_location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(f"{path}.py") + paths_to_remove.add(f"{path}.pyc") + paths_to_remove.add(f"{path}.pyo") + + elif dist.installed_by_distutils: + raise UninstallationError( + "Cannot uninstall {!r}. It is a distutils installed project " + "and thus we cannot accurately determine which files belong " + "to it which would lead to only a partial uninstall.".format( + dist.raw_name, + ) + ) + + elif dist.installed_as_egg: + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. 
+            paths_to_remove.add(dist_location)
+            easy_install_egg = os.path.split(dist_location)[1]
+            easy_install_pth = os.path.join(
+                os.path.dirname(dist_location),
+                "easy-install.pth",
+            )
+            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+        elif dist.installed_with_dist_info:
+            for path in uninstallation_paths(dist):
+                paths_to_remove.add(path)
+
+        elif develop_egg_link:
+            # PEP 660 modern editable is handled in the ``.dist-info`` case
+            # above, so this only covers the setuptools-style editable.
+            with open(develop_egg_link) as fh:
+                link_pointer = os.path.normcase(fh.readline().strip())
+                normalized_link_pointer = paths_to_remove._normalize_path_cached(
+                    link_pointer
+                )
+            assert os.path.samefile(
+                normalized_link_pointer, normalized_dist_location
+            ), (
+                f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
+                f"installed location of {dist.raw_name} (at {dist_location})"
+            )
+            paths_to_remove.add(develop_egg_link)
+            easy_install_pth = os.path.join(
+                os.path.dirname(develop_egg_link), "easy-install.pth"
+            )
+            paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+        else:
+            logger.debug(
+                "Not sure how to uninstall: %s - Check: %s",
+                dist,
+                dist_location,
+            )
+
+        if dist.in_usersite:
+            bin_dir = get_bin_user()
+        else:
+            bin_dir = get_bin_prefix()
+
+        # find scripts installed via distutils' scripts= argument
+        try:
+            for script in dist.iter_distutils_script_names():
+                paths_to_remove.add(os.path.join(bin_dir, script))
+                if WINDOWS:
+                    paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
+        except (FileNotFoundError, NotADirectoryError):
+            pass
+
+        # find console_scripts and gui_scripts
+        def iter_scripts_to_remove(
+            dist: BaseDistribution,
+            bin_dir: str,
+        ) -> Generator[str, None, None]:
+            for entry_point in dist.iter_entry_points():
+                if entry_point.group == "console_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, False)
+                elif entry_point.group == "gui_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, True)
+
+        for s in iter_scripts_to_remove(dist, bin_dir):
+            paths_to_remove.add(s)
+
+        return paths_to_remove
+
+
+class UninstallPthEntries:
+    def __init__(self, pth_file: str) -> None:
+        self.file = pth_file
+        self.entries: Set[str] = set()
+        self._saved_lines: Optional[List[bytes]] = None
+
+    def add(self, entry: str) -> None:
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes. This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
+        # os.path.splitdrive is used instead of os.path.isabs because isabs
+        # treats non-absolute paths with drive letter markings like c:foo\bar
+        # as absolute paths. It also does not recognize UNC paths if they don't
+        # have more than "\\server\share". Valid examples: "\\server\share\" or
+        # "\\server\share\folder".
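+        # For instance, os.path.splitdrive(r"c:foo\bar") returns
+        # ("c:", r"foo\bar") and os.path.splitdrive(r"\\server\share\f")
+        # returns (r"\\server\share", r"\f"), so a non-empty first element
+        # reliably signals a drive or UNC marking.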
+ if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace("\\", "/") + self.entries.add(entry) + + def remove(self) -> None: + logger.verbose("Removing pth entries from %s:", self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning("Cannot remove entries from nonexistent file %s", self.file) + return + with open(self.file, "rb") as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b"\r\n" in line for line in lines): + endline = "\r\n" + else: + endline = "\n" + # handle missing trailing newline + if lines and not lines[-1].endswith(endline.encode("utf-8")): + lines[-1] = lines[-1] + endline.encode("utf-8") + for entry in self.entries: + try: + logger.verbose("Removing entry: %s", entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, "wb") as fh: + fh.writelines(lines) + + def rollback(self) -> bool: + if self._saved_lines is None: + logger.error("Cannot roll back changes to %s, none were made", self.file) + return False + logger.debug("Rolling %s back to previous state", self.file) + with open(self.file, "wb") as fh: + fh.writelines(self._saved_lines) + return True diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..cdc71f0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..03b8bec Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/base.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 0000000..42dade1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from typing import Callable, List, Optional + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet + +InstallRequirementProvider = Callable[ + [str, Optional[InstallRequirement]], InstallRequirement +] + + +class BaseResolver: + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + raise NotImplementedError() + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a82384c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc new file mode 100644 index 0000000..15d8b21 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py new file mode 100644 index 0000000..5ddb848 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -0,0 +1,598 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import sys +from collections import defaultdict +from itertools import chain +from typing import DefaultDict, Iterable, List, Optional, Set, Tuple + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.requirements import Requirement + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + InstallationError, + NoneMetadataError, + UnsupportedPythonVersion, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.utils import compatibility_tags +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.direct_url_helpers import direct_url_from_link +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.packaging import check_requires_python + +logger = logging.getLogger(__name__) + +DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] + + +def _check_dist_requires_python( + dist: BaseDistribution, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> None: + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. 
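+    :param dist: The distribution whose "Requires-Python" metadata is read.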
+
+    :raises UnsupportedPythonVersion: When the given Python version isn't
+        compatible.
+    """
+    # This idiosyncratically converts the SpecifierSet to str and lets
+    # check_requires_python then parse it again into SpecifierSet. But this
+    # is the legacy resolver so I'm just not going to bother refactoring.
+    try:
+        requires_python = str(dist.requires_python)
+    except FileNotFoundError as e:
+        raise NoneMetadataError(dist, str(e))
+    try:
+        is_compatible = check_requires_python(
+            requires_python,
+            version_info=version_info,
+        )
+    except specifiers.InvalidSpecifier as exc:
+        logger.warning(
+            "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
+        )
+        return
+
+    if is_compatible:
+        return
+
+    version = ".".join(map(str, version_info))
+    if ignore_requires_python:
+        logger.debug(
+            "Ignoring failed Requires-Python check for package %r: %s not in %r",
+            dist.raw_name,
+            version,
+            requires_python,
+        )
+        return
+
+    raise UnsupportedPythonVersion(
+        "Package {!r} requires a different Python: {} not in {!r}".format(
+            dist.raw_name, version, requires_python
+        )
+    )
+
+
+class Resolver(BaseResolver):
+    """Resolves which packages need to be installed/uninstalled to perform \
+    the requested operation without breaking the requirements of any package.
+    """
+
+    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+    def __init__(
+        self,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        wheel_cache: Optional[WheelCache],
+        make_install_req: InstallRequirementProvider,
+        use_user_site: bool,
+        ignore_dependencies: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        force_reinstall: bool,
+        upgrade_strategy: str,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> None:
+        super().__init__()
+        assert upgrade_strategy in self._allowed_strategies
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        self._py_version_info = py_version_info
+
+        self.preparer = preparer
+        self.finder = finder
+        self.wheel_cache = wheel_cache
+
+        self.upgrade_strategy = upgrade_strategy
+        self.force_reinstall = force_reinstall
+        self.ignore_dependencies = ignore_dependencies
+        self.ignore_installed = ignore_installed
+        self.ignore_requires_python = ignore_requires_python
+        self.use_user_site = use_user_site
+        self._make_install_req = make_install_req
+
+        self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
+
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        """Resolve what operations need to be done
+
+        As a side-effect of this method, the packages (and their dependencies)
+        are downloaded, unpacked and prepared for installation. This
+        preparation is done by ``pip.operations.prepare``.
+
+        Once PyPI has static dependency metadata available, it would be
+        possible to move the preparation to become a step separated from
+        dependency resolution.
+        """
+        requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        for req in root_reqs:
+            if req.constraint:
+                check_invalid_constraint_type(req)
+            self._add_requirement_to_set(requirement_set, req)
+
+        # Actually prepare the files, and collect any exceptions. Most hash
+        # exceptions cannot be checked ahead of time, because
+        # _populate_link() needs to be called before we can make decisions
+        # based on link type.
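+        # Note that extending discovered_reqs while iterating over the
+        # chain() below is intentional: chain consumes the list lazily, so
+        # dependencies appended by _resolve_one are themselves resolved in
+        # the same (breadth-first) pass.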
+ discovered_reqs: List[InstallRequirement] = [] + hash_errors = HashErrors() + for req in chain(requirement_set.all_requirements, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + return requirement_set + + def _add_requirement_to_set( + self, + requirement_set: RequirementSet, + install_req: InstallRequirement, + parent_req_name: Optional[str] = None, + extras_requested: Optional[Iterable[str]] = None, + ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, + install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if requirement_set.check_supported_wheels and not wheel.supported(tags): + raise InstallationError( + f"{wheel.filename} is not a supported wheel on this platform." + ) + + # This next bit is really a sanity check. + assert ( + not install_req.user_supplied or parent_req_name is None + ), "a user supplied req shouldn't have a parent" + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. + if not install_req.name: + requirement_set.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req: Optional[ + InstallRequirement + ] = requirement_set.get_requirement(install_req.name) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req + and install_req.req + and existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: {} (already in {}, name={!r})".format( + install_req, existing_req, install_req.name + ) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + requirement_set.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
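+        # After this early return, the only case left is a non-constraint
+        # requirement arriving for a name previously added only as a
+        # constraint; that constraint is promoted to a real requirement
+        # below.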
+ if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = install_req.link and not ( + existing_req.link and install_req.link.path == existing_req.link.path + ) + if does_not_satisfy_constraint: + raise InstallationError( + f"Could not satisfy constraints for '{install_req.name}': " + "installation from path or url cannot be " + "constrained to a version" + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. + if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple( + sorted(set(existing_req.extras) | set(install_req.extras)) + ) + logger.debug( + "Setting %s extras to: %s", + existing_req, + existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.user_supplied or req.constraint + + def _set_req_to_reinstall(self, req: InstallRequirement) -> None: + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or req.satisfied_by.in_usersite: + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed( + self, req_to_install: InstallRequirement + ) -> Optional[str]: + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return "already satisfied, skipping upgrade" + return "already satisfied" + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return "already up-to-date" + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? 
+                pass
+
+        self._set_req_to_reinstall(req_to_install)
+        return None
+
+    def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
+        upgrade = self._is_upgrade_allowed(req)
+        best_candidate = self.finder.find_requirement(req, upgrade)
+        if not best_candidate:
+            return None
+
+        # Log a warning per PEP 592 if necessary before returning.
+        link = best_candidate.link
+        if link.is_yanked:
+            reason = link.yanked_reason or ""
+            msg = (
+                # Mark this as a unicode string to prevent
+                # "UnicodeEncodeError: 'ascii' codec can't encode character"
+                # in Python 2 when the reason contains non-ascii characters.
+                "The candidate selected for download or install is a "
+                f"yanked version: {best_candidate}\n"
+                f"Reason for being yanked: {reason}"
+            )
+            logger.warning(msg)
+
+        return link
+
+    def _populate_link(self, req: InstallRequirement) -> None:
+        """Ensure that if a link can be found for this requirement, it is found.
+
+        Note that req.link may still be None - if the requirement is already
+        installed and does not need to be upgraded based on the return value
+        of _is_upgrade_allowed().
+
+        If preparer.require_hashes is True, don't use the wheel cache, because
+        cached wheels, always built locally, have different hashes than the
+        files downloaded from the index server and thus throw false hash
+        mismatches. Furthermore, cached wheels at present have nondeterministic
+        contents due to file modification times.
+        """
+        if req.link is None:
+            req.link = self._find_requirement_link(req)
+
+        if self.wheel_cache is None or self.preparer.require_hashes:
+            return
+        cache_entry = self.wheel_cache.get_cache_entry(
+            link=req.link,
+            package_name=req.name,
+            supported_tags=get_supported(),
+        )
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            if req.link is req.original_link and cache_entry.persistent:
+                req.cached_wheel_source_link = req.link
+            if cache_entry.origin is not None:
+                req.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hashes field.
+                req.download_info = direct_url_from_link(
+                    req.link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+            req.link = cache_entry.link
+
+    def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
+        """Takes an InstallRequirement and returns a single AbstractDist \
+        representing a prepared variant of the same.
+        """
+        if req.editable:
+            return self.preparer.prepare_editable_requirement(req)
+
+        # satisfied_by is only evaluated by calling _check_skip_installed,
+        # so it must be None here.
+        assert req.satisfied_by is None
+        skip_reason = self._check_skip_installed(req)
+
+        if req.satisfied_by:
+            return self.preparer.prepare_installed_requirement(req, skip_reason)
+
+        # We eagerly populate the link, since that's our "legacy" behavior.
+        self._populate_link(req)
+        dist = self.preparer.prepare_linked_requirement(req)
+
+        # NOTE
+        # The following portion is for determining if a certain package is
+        # going to be re-installed/upgraded or not and reporting to the user.
+        # This should probably get cleaned up in a future refactor.
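+        # [Editorial summary] For context, how the three upgrade strategies
+        # behave, per _is_upgrade_allowed above:
+        #
+        #   "to-satisfy-only"  -> upgrades are never sought
+        #   "eager"            -> upgrades are always sought
+        #   "only-if-needed"   -> upgrades are sought only for user-supplied
+        #                         requirements and constraints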
+ + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" + or self.force_reinstall + or self.ignore_installed + or req.link.scheme == "file" + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + "Requirement already satisfied (use --upgrade to upgrade): %s", + req, + ) + return dist + + def _resolve_one( + self, + requirement_set: RequirementSet, + req_to_install: InstallRequirement, + ) -> List[InstallRequirement]: + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # Parse and return dependencies + dist = self._get_dist_for(req_to_install) + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, + version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) + + more_reqs: List[InstallRequirement] = [] + + def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: + # This idiosyncratically converts the Requirement to str and let + # make_install_req then parse it again into Requirement. But this is + # the legacy resolver so I'm just not going to bother refactoring. + sub_install_req = self._make_install_req(str(subreq), req_to_install) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = self._add_requirement_to_set( + requirement_set, + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append(add_to_parent) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. + assert req_to_install.user_supplied + self._add_requirement_to_set( + requirement_set, req_to_install, parent_req_name=None + ) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ",".join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.iter_provided_extras()) + ) + for missing in missing_requested: + logger.warning( + "%s %s does not provide the extra '%s'", + dist.raw_name, + dist.version, + missing, + ) + + available_requested = sorted( + set(dist.iter_provided_extras()) & set(req_to_install.extras) + ) + for subreq in dist.iter_dependencies(available_requested): + add_req(subreq, extras_requested=available_requested) + + return more_reqs + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. 
We break cycles at an arbitrary point,
+        and make no other guarantees.
+        """
+        # The current implementation, which we may change at any point
+        # installs the user specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
+        order = []
+        ordered_reqs: Set[InstallRequirement] = set()
+
+        def schedule(req: InstallRequirement) -> None:
+            if req.satisfied_by or req in ordered_reqs:
+                return
+            if req.constraint:
+                return
+            ordered_reqs.add(req)
+            for dep in self._discovered_dependencies[req.name]:
+                schedule(dep)
+            order.append(req)
+
+        for install_req in req_set.requirements.values():
+            schedule(install_req)
+        return order
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py
new file mode 100644
index 0000000..9c0ef5c
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py
@@ -0,0 +1,141 @@
+from typing import FrozenSet, Iterable, Optional, Tuple, Union
+
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.hashes import Hashes
+
+CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
+CandidateVersion = Union[LegacyVersion, Version]
+
+
+def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
+    if not extras:
+        return project
+    extras_expr = ",".join(sorted(extras))
+    return f"{project}[{extras_expr}]"
+
+
+class Constraint:
+    def __init__(
+        self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
+    ) -> None:
+        self.specifier = specifier
+        self.hashes = hashes
+        self.links = links
+
+    @classmethod
+    def empty(cls) -> "Constraint":
+        return Constraint(SpecifierSet(), Hashes(), frozenset())
+
+    @classmethod
+    def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
+        links = frozenset([ireq.link]) if ireq.link else frozenset()
+        return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
+
+    def __bool__(self) -> bool:
+        return bool(self.specifier) or bool(self.hashes) or bool(self.links)
+
+    def __and__(self, other: InstallRequirement) -> "Constraint":
+        if not isinstance(other, InstallRequirement):
+            return NotImplemented
+        specifier = self.specifier & other.specifier
+        hashes = self.hashes & other.hashes(trust_internet=False)
+        links = self.links
+        if other.link:
+            links = links.union([other.link])
+        return Constraint(specifier, hashes, links)
+
+    def is_satisfied_by(self, candidate: "Candidate") -> bool:
+        # Reject if there are any mismatched URL constraints on this package.
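+        # [Editorial sketch] For context, how a Constraint is typically built
+        # up before this check runs (hypothetical ireq objects, not a literal
+        # pip call sequence):
+        #
+        #     constraint = Constraint.from_ireq(first_constraint_ireq)
+        #     constraint &= another_constraint_ireq  # merges spec/hashes/links
+        #     ok = constraint.is_satisfied_by(candidate)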
+ if self.links and not all(_match_link(link, candidate) for link in self.links): + return False + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + return self.specifier.contains(candidate.version, prereleases=True) + + +class Requirement: + @property + def project_name(self) -> NormalizedName: + """The "project name" of a requirement. + + This is different from ``name`` if this requirement contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Subclass should override") + + @property + def name(self) -> str: + """The name identifying this requirement in the resolver. + + This is different from ``project_name`` if this requirement contains + extras, where ``project_name`` would not contain the ``[...]`` part. + """ + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate: "Candidate") -> bool: + return False + + def get_candidate_lookup(self) -> CandidateLookup: + raise NotImplementedError("Subclass should override") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") + + +def _match_link(link: Link, candidate: "Candidate") -> bool: + if candidate.source_link: + return links_equivalent(link, candidate.source_link) + return False + + +class Candidate: + @property + def project_name(self) -> NormalizedName: + """The "project name" of the candidate. + + This is different from ``name`` if this candidate contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Override in subclass") + + @property + def name(self) -> str: + """The name identifying this candidate in the resolver. + + This is different from ``project_name`` if this candidate contains + extras, where ``project_name`` would not contain the ``[...]`` part. 
+ """ + raise NotImplementedError("Override in subclass") + + @property + def version(self) -> CandidateVersion: + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def source_link(self) -> Optional[Link]: + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self) -> Optional[InstallRequirement]: + raise NotImplementedError("Override in subclass") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 0000000..4125cda --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,597 @@ +import logging +import sys +from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import ( + HashError, + InstallationSubprocessError, + MetadataInconsistent, +) +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link, links_equivalent +from pip._internal.models.wheel import Wheel +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.direct_url_helpers import direct_url_from_link +from pip._internal.utils.misc import normalize_version_info + +from .base import Candidate, CandidateVersion, Requirement, format_name + +if TYPE_CHECKING: + from .factory import Factory + +logger = logging.getLogger(__name__) + +BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", +] + +# Avoid conflicting with the PyPI package "Python". 
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "") + + +def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: + """The runtime version of BaseCandidate.""" + base_candidate_classes = ( + AlreadyInstalledCandidate, + EditableCandidate, + LinkCandidate, + ) + if isinstance(candidate, base_candidate_classes): + return candidate + return None + + +def make_install_req_from_link( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + global_options=template.global_options, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.original_link = template.original_link + ireq.link = link + ireq.extras = template.extras + return ireq + + +def make_install_req_from_editable( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert template.editable, "template not editable" + ireq = install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + permit_editable_wheels=template.permit_editable_wheels, + global_options=template.global_options, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.extras = template.extras + return ireq + + +def _make_install_req_from_dist( + dist: BaseDistribution, template: InstallRequirement +) -> InstallRequirement: + if template.req: + line = str(template.req) + elif template.link: + line = f"{dist.canonical_name} @ {template.link.url}" + else: + line = f"{dist.canonical_name}=={dist.version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + global_options=template.global_options, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). 
+ """ + + dist: BaseDistribution + is_installed = False + + def __init__( + self, + link: Link, + source_link: Link, + ireq: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self.dist = self._prepare() + + def __str__(self) -> str: + return f"{self.name} {self.version}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self._link)!r})" + + def __hash__(self) -> int: + return hash((self.__class__, self._link)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return links_equivalent(self._link, other._link) + return False + + @property + def source_link(self) -> Optional[Link]: + return self._source_link + + @property + def project_name(self) -> NormalizedName: + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = self.dist.canonical_name + return self._name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + if self._version is None: + self._version = self.dist.version + return self._version + + def format_for_error(self) -> str: + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link, + ) + + def _prepare_distribution(self) -> BaseDistribution: + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self, dist: BaseDistribution) -> None: + """Check for consistency of project name and version of dist.""" + if self._name is not None and self._name != dist.canonical_name: + raise MetadataInconsistent( + self._ireq, + "name", + self._name, + dist.canonical_name, + ) + if self._version is not None and self._version != dist.version: + raise MetadataInconsistent( + self._ireq, + "version", + str(self._version), + str(dist.version), + ) + + def _prepare(self) -> BaseDistribution: + try: + dist = self._prepare_distribution() + except HashError as e: + # Provide HashError the underlying ireq that caused it. This + # provides context for the resulting error message to show the + # offending line to the user. + e.req = self._ireq + raise + except InstallationSubprocessError as exc: + # The output has been presented already, so don't duplicate it. + exc.context = "See above for output." 
+ raise + + self._check_metadata_consistency(dist) + return dist + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + requires = self.dist.iter_dependencies() if with_requires else () + for r in requires: + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) + yield self._factory.make_requires_python_requirement(self.dist.requires_python) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + source_link = link + cache_entry = factory.get_wheel_cache_entry(source_link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + assert ireq.link == link + if ireq.link.is_wheel and not ireq.link.is_file: + wheel = Wheel(ireq.link.filename) + wheel_name = canonicalize_name(wheel.name) + assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" + # Version may not be present for PEP 508 direct URLs + if version is not None: + wheel_version = Version(wheel.version) + assert version == wheel_version, "{!r} != {!r} for wheel {}".format( + version, wheel_version, name + ) + + if cache_entry is not None: + assert ireq.link.is_wheel + assert ireq.link.is_file + if cache_entry.persistent and template.link is template.original_link: + ireq.cached_wheel_source_link = source_link + if cache_entry.origin is not None: + ireq.download_info = cache_entry.origin + else: + # Legacy cache entry that does not have origin.json. + # download_info may miss the archive_info.hashes field. + ireq.download_info = direct_url_from_link( + source_link, link_is_in_wheel_cache=cache_entry.persistent + ) + + super().__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + preparer = self._factory.preparer + return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + super().__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist: BaseDistribution, + template: InstallRequirement, + factory: "Factory", + ) -> None: + self.dist = dist + self._ireq = _make_install_req_from_dist(dist, template) + self._factory = factory + self._version = None + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in _make_install_req_from_dist. + # TODO: Supply reason based on force_reinstall and upgrade_strategy. 
+ skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __str__(self) -> str: + return str(self.dist) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.dist!r})" + + def __hash__(self) -> int: + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + @property + def project_name(self) -> NormalizedName: + return self.dist.canonical_name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + if self._version is None: + self._version = self.dist.version + return self._version + + @property + def is_editable(self) -> bool: + return self.dist.editable + + def format_for_error(self) -> str: + return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + if not with_requires: + return + for r in self.dist.iter_dependencies(): + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. + """ + + def __init__( + self, + base: BaseCandidate, + extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, + ) -> None: + """ + :param comes_from: the InstallRequirement that led to this candidate if it + differs from the base's InstallRequirement. This will often be the + case in the sense that this candidate's requirement has the extras + while the base's does not. Unlike the InstallRequirement backed + candidates, this requirement is used solely for reporting purposes, + it does not do any leg work. + """ + self.base = base + self.extras = frozenset(canonicalize_name(e) for e in extras) + # If any extras are requested in their non-normalized forms, keep track + # of their raw values. This is needed when we look up dependencies + # since PEP 685 has not been implemented for marker-matching, and using + # the non-normalized extra for lookup ensures the user can select a + # non-normalized extra in a package with its non-normalized form. 
+ # TODO: Remove this attribute when packaging is upgraded to support the + # marker comparison logic specified in PEP 685. + self._unnormalized_extras = extras.difference(self.extras) + self._comes_from = comes_from if comes_from is not None else self.base._ireq + + def __str__(self) -> str: + name, rest = str(self.base).split(" ", 1) + return "{}[{}] {}".format(name, ",".join(self.extras), rest) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})" + + def __hash__(self) -> int: + return hash((self.base, self.extras)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + @property + def project_name(self) -> NormalizedName: + return self.base.project_name + + @property + def name(self) -> str: + """The normalised name of the project the candidate refers to""" + return format_name(self.base.project_name, self.extras) + + @property + def version(self) -> CandidateVersion: + return self.base.version + + def format_for_error(self) -> str: + return "{} [{}]".format( + self.base.format_for_error(), ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self) -> bool: + return self.base.is_installed + + @property + def is_editable(self) -> bool: + return self.base.is_editable + + @property + def source_link(self) -> Optional[Link]: + return self.base.source_link + + def _warn_invalid_extras( + self, + requested: FrozenSet[str], + valid: FrozenSet[str], + ) -> None: + """Emit warnings for invalid extras being requested. + + This emits a warning for each requested extra that is not in the + candidate's ``Provides-Extra`` list. + """ + invalid_extras_to_warn = frozenset( + extra + for extra in requested + if extra not in valid + # If an extra is requested in an unnormalized form, skip warning + # about the normalized form being missing. + and extra in self.extras + ) + if not invalid_extras_to_warn: + return + for extra in sorted(invalid_extras_to_warn): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + def _calculate_valid_requested_extras(self) -> FrozenSet[str]: + """Get a list of valid extras requested by this candidate. + + The user (or upstream dependant) may have specified extras that the + candidate doesn't support. Any unsupported extras are dropped, and each + cause a warning to be logged here. + """ + requested_extras = self.extras.union(self._unnormalized_extras) + valid_extras = frozenset( + extra + for extra in requested_extras + if self.base.dist.is_extra_provided(extra) + ) + self._warn_invalid_extras(requested_extras, valid_extras) + return valid_extras + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + factory = self.base._factory + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + if not with_requires: + return + + valid_extras = self._calculate_valid_requested_extras() + for r in self.base.dist.iter_dependencies(valid_extras): + yield from factory.make_requirements_from_spec( + str(r), + self._comes_from, + valid_extras, + ) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. 
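+        # [Editorial sketch] The docstring's conflict-forcing trick, drawn out
+        # (made-up names and versions):
+        #
+        #     foo[extra1] 1.0  -> depends on ->  foo==1.0
+        #     foo[extra2] 2.0  -> depends on ->  foo==2.0
+        #
+        # Asking for both extras forces the resolver to unify foo==1.0 with
+        # foo==2.0 and surface the conflict, rather than install two bases.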
+ return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None: + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. + + def __str__(self) -> str: + return f"Python {self._version}" + + @property + def project_name(self) -> NormalizedName: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def name(self) -> str: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def version(self) -> CandidateVersion: + return self._version + + def format_for_error(self) -> str: + return f"Python {self.version}" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + return () + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 0000000..4adeb43 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,812 @@ +import contextlib +import functools +import logging +from typing import ( + TYPE_CHECKING, + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Mapping, + NamedTuple, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.resolvelib import ResolutionImpossible + +from pip._internal.cache import CacheEntry, WheelCache +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + MetadataInconsistent, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_drop_extras, + install_req_from_link_and_ireq, +) +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.resolution.base import InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import Candidate, CandidateVersion, Constraint, Requirement +from .candidates import ( + AlreadyInstalledCandidate, + BaseCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, + as_base_candidate, +) +from .found_candidates import FoundCandidates, IndexCandidateInfo +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, + SpecifierWithoutExtrasRequirement, + UnsatisfiableRequirement, +) 
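+
+# [Editorial sketch] How this module is typically driven (simplified wiring,
+# not the literal pip call sequence; ``finder``, ``preparer``,
+# ``make_install_req``, ``wheel_cache`` and ``ireqs`` are assumed to exist):
+#
+#     factory = Factory(
+#         finder, preparer, make_install_req, wheel_cache,
+#         use_user_site=False, force_reinstall=False,
+#         ignore_installed=False, ignore_requires_python=False,
+#     )
+#     root = factory.collect_root_requirements(ireqs)
+#     # root.requirements and root.constraints then seed the resolvelib
+#     # provider, which calls factory.find_candidates() during resolution.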
+ +if TYPE_CHECKING: + from typing import Protocol + + class ConflictCause(Protocol): + requirement: RequiresPythonRequirement + parent: Candidate + + +logger = logging.getLogger(__name__) + +C = TypeVar("C") +Cache = Dict[Link, C] + + +class CollectedRootRequirements(NamedTuple): + requirements: List[Requirement] + constraints: Dict[str, Constraint] + user_requested: Dict[str, int] + + +class Factory: + def __init__( + self, + finder: PackageFinder, + preparer: RequirementPreparer, + make_install_req: InstallRequirementProvider, + wheel_cache: Optional[WheelCache], + use_user_site: bool, + force_reinstall: bool, + ignore_installed: bool, + ignore_requires_python: bool, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + + self._build_failures: Cache[InstallationError] = {} + self._link_candidate_cache: Cache[LinkCandidate] = {} + self._editable_candidate_cache: Cache[EditableCandidate] = {} + self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} + self._extras_candidate_cache: Dict[ + Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate + ] = {} + + if not ignore_installed: + env = get_default_environment() + self._installed_dists = { + dist.canonical_name: dist + for dist in env.iter_installed_distributions(local_only=False) + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self) -> bool: + return self._force_reinstall + + def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: + if not link.is_wheel: + return + wheel = Wheel(link.filename) + if wheel.supported(self._finder.target_python.get_unsorted_tags()): + return + msg = f"{link.filename} is not a supported wheel on this platform." 
+ raise UnsupportedWheel(msg) + + def _make_extras_candidate( + self, + base: BaseCandidate, + extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, + ) -> ExtrasCandidate: + cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras)) + try: + candidate = self._extras_candidate_cache[cache_key] + except KeyError: + candidate = ExtrasCandidate(base, extras, comes_from=comes_from) + self._extras_candidate_cache[cache_key] = candidate + return candidate + + def _make_candidate_from_dist( + self, + dist: BaseDistribution, + extras: FrozenSet[str], + template: InstallRequirement, + ) -> Candidate: + try: + base = self._installed_candidate_cache[dist.canonical_name] + except KeyError: + base = AlreadyInstalledCandidate(dist, template, factory=self) + self._installed_candidate_cache[dist.canonical_name] = base + if not extras: + return base + return self._make_extras_candidate(base, extras, comes_from=template) + + def _make_candidate_from_link( + self, + link: Link, + extras: FrozenSet[str], + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[Candidate]: + base: Optional[BaseCandidate] = self._make_base_candidate_from_link( + link, template, name, version + ) + if not extras or base is None: + return base + return self._make_extras_candidate(base, extras, comes_from=template) + + def _make_base_candidate_from_link( + self, + link: Link, + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[BaseCandidate]: + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. + + if link in self._build_failures: + # We already tried this candidate before, and it does not build. + # Don't bother trying again. + return None + + if template.editable: + if link not in self._editable_candidate_cache: + try: + self._editable_candidate_cache[link] = EditableCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except MetadataInconsistent as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + + return self._editable_candidate_cache[link] + else: + if link not in self._link_candidate_cache: + try: + self._link_candidate_cache[link] = LinkCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except MetadataInconsistent as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + return self._link_candidate_cache[link] + + def _iter_found_candidates( + self, + ireqs: Sequence[InstallRequirement], + specifier: SpecifierSet, + hashes: Hashes, + prefers_installed: bool, + incompatible_ids: Set[int], + ) -> Iterable[Candidate]: + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. 
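+        # [Editorial sketch] The loop below folds every requirement for this
+        # name into one (specifier, hashes, extras) triple; SpecifierSet
+        # supports ``&`` for exactly this (vendored packaging API,
+        # illustrative values):
+        #
+        #     >>> from pip._vendor.packaging.specifiers import SpecifierSet
+        #     >>> str(SpecifierSet(">=1.0") & SpecifierSet("<2.0"))
+        #     '<2.0,>=1.0'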
+ template = ireqs[0] + assert template.req, "Candidates found on index must be PEP 508" + name = canonicalize_name(template.req.name) + + extras: FrozenSet[str] = frozenset() + for ireq in ireqs: + assert ireq.req, "Candidates found on index must be PEP 508" + specifier &= ireq.req.specifier + hashes &= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + def _get_installed_candidate() -> Optional[Candidate]: + """Get the candidate for the currently-installed version.""" + # If --force-reinstall is set, we want the version from the index + # instead, so we "pretend" there is nothing installed. + if self._force_reinstall: + return None + try: + installed_dist = self._installed_dists[name] + except KeyError: + return None + # Don't use the installed distribution if its version does not fit + # the current dependency graph. + if not specifier.contains(installed_dist.version, prereleases=True): + return None + candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + # The candidate is a known incompatibility. Don't use it. + if id(candidate) in incompatible_ids: + return None + return candidate + + def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: + result = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + icans = list(result.iter_applicable()) + + # PEP 592: Yanked releases are ignored unless the specifier + # explicitly pins a version (via '==' or '===') that can be + # solely satisfied by a yanked release. + all_yanked = all(ican.link.is_yanked for ican in icans) + + def is_pinned(specifier: SpecifierSet) -> bool: + for sp in specifier: + if sp.operator == "===": + return True + if sp.operator != "==": + continue + if sp.version.endswith(".*"): + continue + return True + return False + + pinned = is_pinned(specifier) + + # PackageFinder returns earlier versions first, so we reverse. + for ican in reversed(icans): + if not (all_yanked and pinned) and ican.link.is_yanked: + continue + func = functools.partial( + self._make_candidate_from_link, + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + yield ican.version, func + + return FoundCandidates( + iter_index_candidate_infos, + _get_installed_candidate(), + prefers_installed, + incompatible_ids, + ) + + def _iter_explicit_candidates_from_base( + self, + base_requirements: Iterable[Requirement], + extras: FrozenSet[str], + ) -> Iterator[Candidate]: + """Produce explicit candidates from the base given an extra-ed package. + + :param base_requirements: Requirements known to the resolver. The + requirements are guaranteed to not have extras. + :param extras: The extras to inject into the explicit requirements' + candidates. + """ + for req in base_requirements: + lookup_cand, _ = req.get_candidate_lookup() + if lookup_cand is None: # Not explicit. + continue + # We've stripped extras from the identifier, and should always + # get a BaseCandidate here, unless there's a bug elsewhere. + base_cand = as_base_candidate(lookup_cand) + assert base_cand is not None, "no extras here" + yield self._make_extras_candidate(base_cand, extras) + + def _iter_candidates_from_constraints( + self, + identifier: str, + constraint: Constraint, + template: InstallRequirement, + ) -> Iterator[Candidate]: + """Produce explicit candidates from constraints. 
+ + This creates "fake" InstallRequirement objects that are basically clones + of what "should" be the template, but with original_link set to link. + """ + for link in constraint.links: + self._fail_if_link_is_unsupported_wheel(link) + candidate = self._make_base_candidate_from_link( + link, + template=install_req_from_link_and_ireq(link, template), + name=canonicalize_name(identifier), + version=None, + ) + if candidate: + yield candidate + + def find_candidates( + self, + identifier: str, + requirements: Mapping[str, Iterable[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + constraint: Constraint, + prefers_installed: bool, + ) -> Iterable[Candidate]: + # Collect basic lookup information from the requirements. + explicit_candidates: Set[Candidate] = set() + ireqs: List[InstallRequirement] = [] + for req in requirements[identifier]: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If the current identifier contains extras, add requires and explicit + # candidates from entries from extra-less identifier. + with contextlib.suppress(InvalidRequirement): + parsed_requirement = get_requirement(identifier) + if parsed_requirement.name != identifier: + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) + + # Add explicit candidates from constraints. We only do this if there are + # known ireqs, which represent requirements not already explicit. If + # there are no ireqs, we're constraining already-explicit requirements, + # which is handled later when we return the explicit candidates. + if ireqs: + try: + explicit_candidates.update( + self._iter_candidates_from_constraints( + identifier, + constraint, + template=ireqs[0], + ), + ) + except UnsupportedWheel: + # If we're constrained to install a wheel incompatible with the + # target architecture, no candidates will ever be valid. + return () + + # Since we cache all the candidates, incompatibility identification + # can be made quicker by comparing only the id() values. + incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. + if not explicit_candidates: + return self._iter_found_candidates( + ireqs, + constraint.specifier, + constraint.hashes, + prefers_installed, + incompat_ids, + ) + + return ( + c + for c in explicit_candidates + if id(c) not in incompat_ids + and constraint.is_satisfied_by(c) + and all(req.is_satisfied_by(c) for req in requirements[identifier]) + ) + + def _make_requirements_from_install_req( + self, ireq: InstallRequirement, requested_extras: Iterable[str] + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given InstallRequirement. In + most cases this will be a single object but the following special cases exist: + - the InstallRequirement has markers that do not apply -> result is empty + - the InstallRequirement has both a constraint (or link) and extras + -> result is split in two requirement objects: one with the constraint + (or link) and one with the extra. This allows centralized constraint + handling for the base, resulting in fewer candidate rejections. 
+ """ + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, + ireq.markers, + ) + elif not ireq.link: + if ireq.extras and ireq.req is not None and ireq.req.specifier: + yield SpecifierWithoutExtrasRequirement(ireq) + yield SpecifierRequirement(ireq) + else: + self._fail_if_link_is_unsupported_wheel(ireq.link) + # Always make the link candidate for the base requirement to make it + # available to `find_candidates` for explicit candidate lookup for any + # set of extras. + # The extras are required separately via a second requirement. + cand = self._make_base_candidate_from_link( + ireq.link, + template=install_req_drop_extras(ireq) if ireq.extras else ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. + if not ireq.name: + raise self._build_failures[ireq.link] + yield UnsatisfiableRequirement(canonicalize_name(ireq.name)) + else: + # require the base from the link + yield self.make_requirement_from_candidate(cand) + if ireq.extras: + # require the extras on top of the base candidate + yield self.make_requirement_from_candidate( + self._make_extras_candidate(cand, frozenset(ireq.extras)) + ) + + def collect_root_requirements( + self, root_ireqs: List[InstallRequirement] + ) -> CollectedRootRequirements: + collected = CollectedRootRequirements([], {}, {}) + for i, ireq in enumerate(root_ireqs): + if ireq.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(ireq) + if problem: + raise InstallationError(problem) + if not ireq.match_markers(): + continue + assert ireq.name, "Constraint must be named" + name = canonicalize_name(ireq.name) + if name in collected.constraints: + collected.constraints[name] &= ireq + else: + collected.constraints[name] = Constraint.from_ireq(ireq) + else: + reqs = list( + self._make_requirements_from_install_req( + ireq, + requested_extras=(), + ) + ) + if not reqs: + continue + template = reqs[0] + if ireq.user_supplied and template.name not in collected.user_requested: + collected.user_requested[template.name] = i + collected.requirements.extend(reqs) + # Put requirements with extras at the end of the root requires. This does not + # affect resolvelib's picking preference but it does affect its initial criteria + # population: by putting extras at the end we enable the candidate finder to + # present resolvelib with a smaller set of candidates to resolvelib, already + # taking into account any non-transient constraints on the associated base. This + # means resolvelib will have fewer candidates to visit and reject. + # Python's list sort is stable, meaning relative order is kept for objects with + # the same key. 
+ collected.requirements.sort(key=lambda r: r.name != r.project_name) + return collected + + def make_requirement_from_candidate( + self, candidate: Candidate + ) -> ExplicitRequirement: + return ExplicitRequirement(candidate) + + def make_requirements_from_spec( + self, + specifier: str, + comes_from: Optional[InstallRequirement], + requested_extras: Iterable[str] = (), + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given specifier. In most cases + this will be a single object but the following special cases exist: + - the specifier has markers that do not apply -> result is empty + - the specifier has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. + """ + ireq = self._make_install_req_from_spec(specifier, comes_from) + return self._make_requirements_from_install_req(ireq, requested_extras) + + def make_requires_python_requirement( + self, + specifier: SpecifierSet, + ) -> Optional[Requirement]: + if self._ignore_requires_python: + return None + # Don't bother creating a dependency for an empty Requires-Python. + if not str(specifier): + return None + return RequiresPythonRequirement(specifier, self._python_candidate) + + def get_wheel_cache_entry( + self, link: Link, name: Optional[str] + ) -> Optional[CacheEntry]: + """Look up the link in the wheel cache. + + If ``preparer.require_hashes`` is True, don't use the wheel cache, + because cached wheels, always built locally, have different hashes + than the files downloaded from the index server and thus throw false + hash mismatches. Furthermore, cached wheels at present have + nondeterministic contents due to file modification times. + """ + if self._wheel_cache is None: + return None + return self._wheel_cache.get_cache_entry( + link=link, + package_name=name, + supported_tags=get_supported(), + ) + + def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]: + # TODO: Are there more cases this needs to return True? Editable? + dist = self._installed_dists.get(candidate.project_name) + if dist is None: # Not installed, no uninstallation required. + return None + + # We're installing into global site. The current installation must + # be uninstalled, no matter it's in global or user site, because the + # user site installation has precedence over global. + if not self._use_user_site: + return dist + + # We're installing into user site. Remove the user site installation. + if dist.in_usersite: + return dist + + # We're installing into user site, but the installed incompatible + # package is in global site. We can't uninstall that, and would let + # the new user installation to "shadow" it. But shadowing won't work + # in virtual environments, so we error out. 
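+        # [Editorial summary] The whole method as a decision table (inferred
+        # from its branches):
+        #
+        #   not installed                          -> None (nothing to remove)
+        #   installing into global site            -> uninstall existing dist
+        #   user-site install, dist in user site   -> uninstall existing dist
+        #   user-site install, dist in global site,
+        #     inside a virtualenv                  -> raise (cannot shadow)
+        #   user-site install, dist in global site,
+        #     outside a virtualenv                 -> None (shadowed instead)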
+ if running_under_virtualenv() and dist.in_site_packages: + message = ( + f"Will not install to the user site because it will lack " + f"sys.path precedence to {dist.raw_name} in {dist.location}" + ) + raise InstallationError(message) + return None + + def _report_requires_python_error( + self, causes: Sequence["ConflictCause"] + ) -> UnsupportedPythonVersion: + assert causes, "Requires-Python error reported with no cause" + + version = self._python_candidate.version + + if len(causes) == 1: + specifier = str(causes[0].requirement.specifier) + message = ( + f"Package {causes[0].parent.name!r} requires a different " + f"Python: {version} not in {specifier!r}" + ) + return UnsupportedPythonVersion(message) + + message = f"Packages require a different Python. {version} not in:" + for cause in causes: + package = cause.parent.format_for_error() + specifier = str(cause.requirement.specifier) + message += f"\n{specifier!r} (required by {package})" + return UnsupportedPythonVersion(message) + + def _report_single_requirement_conflict( + self, req: Requirement, parent: Optional[Candidate] + ) -> DistributionNotFound: + if parent is None: + req_disp = str(req) + else: + req_disp = f"{req} (from {parent.name})" + + cands = self._finder.find_all_candidates(req.project_name) + skipped_by_requires_python = self._finder.requires_python_skipped_reasons() + + versions_set: Set[CandidateVersion] = set() + yanked_versions_set: Set[CandidateVersion] = set() + for c in cands: + is_yanked = c.link.is_yanked if c.link else False + if is_yanked: + yanked_versions_set.add(c.version) + else: + versions_set.add(c.version) + + versions = [str(v) for v in sorted(versions_set)] + yanked_versions = [str(v) for v in sorted(yanked_versions_set)] + + if yanked_versions: + # Saying "version X is yanked" isn't entirely accurate. + # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842 + logger.critical( + "Ignored the following yanked versions: %s", + ", ".join(yanked_versions) or "none", + ) + if skipped_by_requires_python: + logger.critical( + "Ignored the following versions that require a different python " + "version: %s", + "; ".join(skipped_by_requires_python) or "none", + ) + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req_disp, + ", ".join(versions) or "none", + ) + if str(req) == "requirements.txt": + logger.info( + "HINT: You are attempting to install a package literally " + 'named "requirements.txt" (which cannot exist). Consider ' + "using the '-r' flag to install the packages listed in " + "requirements.txt" + ) + + return DistributionNotFound(f"No matching distribution found for {req}") + + def get_installation_error( + self, + e: "ResolutionImpossible[Requirement, Candidate]", + constraints: Dict[str, Constraint], + ) -> InstallationError: + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + requires_python_causes = [ + cause + for cause in e.causes + if isinstance(cause.requirement, RequiresPythonRequirement) + and not cause.requirement.is_satisfied_by(self._python_candidate) + ] + if requires_python_causes: + # The comprehension above makes sure all Requirement instances are + # RequiresPythonRequirement, so let's cast for convenience. + return self._report_requires_python_error( + cast("Sequence[ConflictCause]", requires_python_causes), + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. 
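As an editorial aside, the yanked/non-yanked partitioning in `_report_single_requirement_conflict` above boils down to this pattern. A standalone sketch with stand-in classes (illustrative only, not pip's real `Candidate`/`Link` types):

    from dataclasses import dataclass
    from typing import Optional, Set, Tuple

    @dataclass
    class Link:
        is_yanked: bool

    @dataclass
    class Cand:
        version: str
        link: Optional[Link]

    def partition_versions(cands) -> Tuple[Set[str], Set[str]]:
        versions: Set[str] = set()
        yanked: Set[str] = set()
        for c in cands:
            # A candidate without a link (e.g. an installed one) is never yanked.
            (yanked if (c.link and c.link.is_yanked) else versions).add(c.version)
        return versions, yanked

    print(partition_versions([Cand("1.0", None), Cand("2.0", Link(True))]))
    # -> ({'1.0'}, {'2.0'})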
+
+        # The simplest case is when we have *one* cause that can't be
+        # satisfied. We just report that case.
+        if len(e.causes) == 1:
+            req, parent = e.causes[0]
+            if req.name not in constraints:
+                return self._report_single_requirement_conflict(req, parent)
+
+        # OK, we now have a list of requirements that can't all be
+        # satisfied at once.
+
+        # A couple of formatting helpers
+        def text_join(parts: List[str]) -> str:
+            if len(parts) == 1:
+                return parts[0]
+
+            return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+        def describe_trigger(parent: Candidate) -> str:
+            ireq = parent.get_install_requirement()
+            if not ireq or not ireq.comes_from:
+                return f"{parent.name}=={parent.version}"
+            if isinstance(ireq.comes_from, InstallRequirement):
+                return str(ireq.comes_from.name)
+            return str(ireq.comes_from)
+
+        triggers = set()
+        for req, parent in e.causes:
+            if parent is None:
+                # This is a root requirement, so we can report it directly
+                trigger = req.format_for_error()
+            else:
+                trigger = describe_trigger(parent)
+            triggers.add(trigger)
+
+        if triggers:
+            info = text_join(sorted(triggers))
+        else:
+            info = "the requested packages"
+
+        msg = (
+            f"Cannot install {info} because these package versions "
+            "have conflicting dependencies."
+        )
+        logger.critical(msg)
+        msg = "\nThe conflict is caused by:"
+
+        relevant_constraints = set()
+        for req, parent in e.causes:
+            if req.name in constraints:
+                relevant_constraints.add(req.name)
+            msg = msg + "\n    "
+            if parent:
+                msg = msg + f"{parent.name} {parent.version} depends on "
+            else:
+                msg = msg + "The user requested "
+            msg = msg + req.format_for_error()
+        for key in relevant_constraints:
+            spec = constraints[key].specifier
+            msg += f"\n    The user requested (constraint) {key}{spec}"
+
+        msg = (
+            msg
+            + "\n\n"
+            + "To fix this you could try to:\n"
+            + "1. loosen the range of package versions you've specified\n"
+            + "2. remove package versions to allow pip to attempt to solve "
+            + "the dependency conflict\n"
+        )
+
+        logger.info(msg)
+
+        return DistributionNotFound(
+            "ResolutionImpossible: for help visit "
+            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+            "#dealing-with-dependency-conflicts"
+        )
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
new file mode 100644
index 0000000..8663097
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -0,0 +1,155 @@
+"""Utilities to lazily create and visit candidates found.
+
+Creating and visiting a candidate is a *very* costly operation. It involves
+fetching, extracting, potentially building modules from source, and verifying
+distribution metadata. It is therefore crucial for performance to keep
+everything here lazy all the way down, so we only touch candidates that we
+absolutely need, and not "download the world" when we only need one version of
+something.
+"""
+
+import functools
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+
+from pip._vendor.packaging.version import _BaseVersion
+
+from .base import Candidate
+
+IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
+
+if TYPE_CHECKING:
+    SequenceCandidate = Sequence[Candidate]
+else:
+    # For compatibility: Python before 3.9 does not support using [] on the
+    # Sequence class.
+    #
+    # >>> from collections.abc import Sequence
+    # >>> Sequence[str]
+    # Traceback (most recent call last):
+    #   File "<stdin>", line 1, in <module>
+    # TypeError: 'ABCMeta' object is not subscriptable
+    #
+    # TODO: Remove this block after dropping Python 3.8 support.
+    SequenceCandidate = Sequence


+def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the package is not already installed. Candidates
+    from the index are yielded in their normal ordering.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers the already-installed
+    candidate and does NOT want to upgrade. The installed candidate is
+    therefore always yielded first, and candidates from the index follow in
+    their normal ordering, except that any candidate matching the installed
+    version is skipped.
+    """
+    yield installed
+    versions_found: Set[_BaseVersion] = {installed.version}
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers to upgrade an
+    already-installed package. Candidates from the index are returned in their
+    normal ordering, except that the installed candidate replaces the index
+    candidate of the same version.
+
+    The implementation iterates through and yields other candidates, inserting
+    the installed candidate exactly once before we start yielding older or
+    equivalent candidates, or after all other candidates if they are all newer.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        # If the installed candidate is better, yield it first.
+        if installed.version >= version:
+            yield installed
+            versions_found.add(installed.version)
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+    # If the installed candidate is older than all other candidates.
+    if installed.version not in versions_found:
+        yield installed
+
+
+class FoundCandidates(SequenceCandidate):
+    """A lazy sequence to provide candidates to the resolver.
+
+    The intended usage is to return this from `find_matches()` so the resolver
+    can iterate through the sequence multiple times, but only access the index
+    page when remote packages are actually needed. This improves performance
+    when suitable candidates are already installed on disk.
+    """
+
+    def __init__(
+        self,
+        get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+        installed: Optional[Candidate],
+        prefers_installed: bool,
+        incompatible_ids: Set[int],
+    ):
+        self._get_infos = get_infos
+        self._installed = installed
+        self._prefers_installed = prefers_installed
+        self._incompatible_ids = incompatible_ids
+
+    def __getitem__(self, index: Any) -> Any:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
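Stepping back for a moment: the laziness contract of the `_iter_built*` helpers above is easiest to see in isolation. Each entry pairs a version with a zero-argument factory, and the factory only runs when the resolver actually reaches that version. A standalone sketch of the same idea (simplified types, not pip's real signatures):

    from typing import Callable, Iterator, Optional, Tuple

    Info = Tuple[str, Callable[[], Optional[str]]]

    def iter_built(infos: Iterator[Info]) -> Iterator[str]:
        seen = set()
        for version, build in infos:
            if version in seen:       # de-duplicate by version
                continue
            candidate = build()       # expensive work happens only here
            if candidate is None:     # build failure -> skip this version
                continue
            yield candidate
            seen.add(version)

    infos = iter([
        ("2.0", lambda: None),            # fails to build; skipped
        ("1.0", lambda: "candidate-1.0"),
    ])
    print(next(iter_built(infos)))        # -> candidate-1.0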
+        raise NotImplementedError("don't do this")
+
+    def __iter__(self) -> Iterator[Candidate]:
+        infos = self._get_infos()
+        if not self._installed:
+            iterator = _iter_built(infos)
+        elif self._prefers_installed:
+            iterator = _iter_built_with_prepended(self._installed, infos)
+        else:
+            iterator = _iter_built_with_inserted(self._installed, infos)
+        return (c for c in iterator if id(c) not in self._incompatible_ids)
+
+    def __len__(self) -> int:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    @functools.lru_cache(maxsize=1)
+    def __bool__(self) -> bool:
+        if self._prefers_installed and self._installed:
+            return True
+        return any(self)
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py
new file mode 100644
index 0000000..315fb9c
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py
@@ -0,0 +1,255 @@
+import collections
+import math
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Sequence,
+    TypeVar,
+    Union,
+)
+
+from pip._vendor.resolvelib.providers import AbstractProvider
+
+from .base import Candidate, Constraint, Requirement
+from .candidates import REQUIRES_PYTHON_IDENTIFIER
+from .factory import Factory
+
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.providers import Preference
+    from pip._vendor.resolvelib.resolvers import RequirementInformation
+
+    PreferenceInformation = RequirementInformation[Requirement, Candidate]
+
+    _ProviderBase = AbstractProvider[Requirement, Candidate, str]
+else:
+    _ProviderBase = AbstractProvider
+
+# Notes on the relationship between the provider, the factory, and the
+# candidate and requirement classes.
+#
+# The provider is a direct implementation of the resolvelib class. Its role
+# is to deliver the API that resolvelib expects.
+#
+# Rather than work with completely abstract "requirement" and "candidate"
+# concepts as resolvelib does, pip has concrete classes implementing these two
+# ideas. The APIs of the Requirement and Candidate objects are defined in the
+# base classes, but essentially map fairly directly to the equivalent provider
+# methods. In particular, `find_matches` and `is_satisfied_by` are
+# requirement methods, and `get_dependencies` is a candidate method.
+#
+# The factory is the interface to pip's internal mechanisms. It is stateless,
+# and is created by the resolver and held as a property of the provider. It is
+# responsible for creating Requirement and Candidate objects, and provides
+# services to those objects (access to pip's finder and preparer).
+
+
+D = TypeVar("D")
+V = TypeVar("V")
+
+
+def _get_with_identifier(
+    mapping: Mapping[str, V],
+    identifier: str,
+    default: D,
+) -> Union[D, V]:
+    """Get item from a package name lookup mapping with a resolver identifier.
+
+    This extra logic is needed when the target mapping is keyed by package
+    name, which cannot be directly looked up with an identifier (which may
+    contain requested extras). Additional logic is added to also look up a value
+    by "cleaning up" the extras from the identifier.
+ """ + if identifier in mapping: + return mapping[identifier] + # HACK: Theoretically we should check whether this identifier is a valid + # "NAME[EXTRAS]" format, and parse out the name part with packaging or + # some regular expression. But since pip's resolver only spits out three + # kinds of identifiers: normalized PEP 503 names, normalized names plus + # extras, and Requires-Python, we can cheat a bit here. + name, open_bracket, _ = identifier.partition("[") + if open_bracket and name in mapping: + return mapping[name] + return default + + +class PipProvider(_ProviderBase): + """Pip's provider implementation for resolvelib. + + :params constraints: A mapping of constraints specified by the user. Keys + are canonicalized project names. + :params ignore_dependencies: Whether the user specified ``--no-deps``. + :params upgrade_strategy: The user-specified upgrade strategy. + :params user_requested: A set of canonicalized package names that the user + supplied for pip to install/upgrade. + """ + + def __init__( + self, + factory: Factory, + constraints: Dict[str, Constraint], + ignore_dependencies: bool, + upgrade_strategy: str, + user_requested: Dict[str, int], + ) -> None: + self._factory = factory + self._constraints = constraints + self._ignore_dependencies = ignore_dependencies + self._upgrade_strategy = upgrade_strategy + self._user_requested = user_requested + self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf) + + def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str: + return requirement_or_candidate.name + + def get_preference( + self, + identifier: str, + resolutions: Mapping[str, Candidate], + candidates: Mapping[str, Iterator[Candidate]], + information: Mapping[str, Iterable["PreferenceInformation"]], + backtrack_causes: Sequence["PreferenceInformation"], + ) -> "Preference": + """Produce a sort key for given requirement based on preference. + + The lower the return value is, the more preferred this group of + arguments is. + + Currently pip considers the following in order: + + * Prefer if any of the known requirements is "direct", e.g. points to an + explicit URL. + * If equal, prefer if any requirement is "pinned", i.e. contains + operator ``===`` or ``==``. + * If equal, calculate an approximate "depth" and resolve requirements + closer to the user-specified requirements first. If the depth cannot + by determined (eg: due to no matching parents), it is considered + infinite. + * Order user-specified requirements by the order they are specified. + * If equal, prefers "non-free" requirements, i.e. contains at least one + operator, such as ``>=`` or ``<``. + * If equal, order alphabetically for consistency (helps debuggability). + """ + try: + next(iter(information[identifier])) + except StopIteration: + # There is no information for this identifier, so there's no known + # candidates. 
+ has_information = False + else: + has_information = True + + if has_information: + lookups = (r.get_candidate_lookup() for r, _ in information[identifier]) + candidate, ireqs = zip(*lookups) + else: + candidate, ireqs = None, () + + operators = [ + specifier.operator + for specifier_set in (ireq.specifier for ireq in ireqs if ireq) + for specifier in specifier_set + ] + + direct = candidate is not None + pinned = any(op[:2] == "==" for op in operators) + unfree = bool(operators) + + try: + requested_order: Union[int, float] = self._user_requested[identifier] + except KeyError: + requested_order = math.inf + if has_information: + parent_depths = ( + self._known_depths[parent.name] if parent is not None else 0.0 + for _, parent in information[identifier] + ) + inferred_depth = min(d for d in parent_depths) + 1.0 + else: + inferred_depth = math.inf + else: + inferred_depth = 1.0 + self._known_depths[identifier] = inferred_depth + + requested_order = self._user_requested.get(identifier, math.inf) + + # Requires-Python has only one candidate and the check is basically + # free, so we always do it first to avoid needless work if it fails. + requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER + + # Prefer the causes of backtracking on the assumption that the problem + # resolving the dependency tree is related to the failures that caused + # the backtracking + backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes) + + return ( + not requires_python, + not direct, + not pinned, + not backtrack_cause, + inferred_depth, + requested_order, + not unfree, + identifier, + ) + + def find_matches( + self, + identifier: str, + requirements: Mapping[str, Iterator[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + ) -> Iterable[Candidate]: + def _eligible_for_upgrade(identifier: str) -> bool: + """Are upgrades allowed for this project? + + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). 
+ """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + user_order = _get_with_identifier( + self._user_requested, + identifier, + default=None, + ) + return user_order is not None + return False + + constraint = _get_with_identifier( + self._constraints, + identifier, + default=Constraint.empty(), + ) + return self._factory.find_candidates( + identifier=identifier, + requirements=requirements, + constraint=constraint, + prefers_installed=(not _eligible_for_upgrade(identifier)), + incompatibilities=incompatibilities, + ) + + def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]: + with_requires = not self._ignore_dependencies + return [r for r in candidate.iter_dependencies(with_requires) if r is not None] + + @staticmethod + def is_backtrack_cause( + identifier: str, backtrack_causes: Sequence["PreferenceInformation"] + ) -> bool: + for backtrack_cause in backtrack_causes: + if identifier == backtrack_cause.requirement.name: + return True + if backtrack_cause.parent and identifier == backtrack_cause.parent.name: + return True + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py new file mode 100644 index 0000000..12adeff --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py @@ -0,0 +1,80 @@ +from collections import defaultdict +from logging import getLogger +from typing import Any, DefaultDict + +from pip._vendor.resolvelib.reporters import BaseReporter + +from .base import Candidate, Requirement + +logger = getLogger(__name__) + + +class PipReporter(BaseReporter): + def __init__(self) -> None: + self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int) + + self._messages_at_reject_count = { + 1: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 8: ( + "pip is still looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 13: ( + "This is taking longer than usual. You might need to provide " + "the dependency resolver with stricter constraints to reduce " + "runtime. See https://pip.pypa.io/warnings/backtracking for " + "guidance. If you want to abort this run, press Ctrl + C." 
+ ), + } + + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + self.reject_count_by_package[candidate.name] += 1 + + count = self.reject_count_by_package[candidate.name] + if count not in self._messages_at_reject_count: + return + + message = self._messages_at_reject_count[count] + logger.info("INFO: %s", message.format(package_name=candidate.name)) + + msg = "Will try a different candidate, due to conflict:" + for req_info in criterion.information: + req, parent = req_info.requirement, req_info.parent + # Inspired by Factory.get_installation_error + msg += "\n " + if parent: + msg += f"{parent.name} {parent.version} depends on " + else: + msg += "The user requested " + msg += req.format_for_error() + logger.debug(msg) + + +class PipDebuggingReporter(BaseReporter): + """A reporter that does an info log for every event it sees.""" + + def starting(self) -> None: + logger.info("Reporter.starting()") + + def starting_round(self, index: int) -> None: + logger.info("Reporter.starting_round(%r)", index) + + def ending_round(self, index: int, state: Any) -> None: + logger.info("Reporter.ending_round(%r, state)", index) + + def ending(self, state: Any) -> None: + logger.info("Reporter.ending(%r)", state) + + def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: + logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) + + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate) + + def pinning(self, candidate: Candidate) -> None: + logger.info("Reporter.pinning(%r)", candidate) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 0000000..4af4a9f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,166 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.req.constructors import install_req_drop_extras +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, CandidateLookup, Requirement, format_name + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate: Candidate) -> None: + self.candidate = candidate + + def __str__(self) -> str: + return str(self.candidate) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.candidate!r})" + + @property + def project_name(self) -> NormalizedName: + # No need to canonicalize - the candidate did this + return self.candidate.project_name + + @property + def name(self) -> str: + # No need to canonicalize - the candidate did this + return self.candidate.name + + def format_for_error(self) -> str: + return self.candidate.format_for_error() + + def get_candidate_lookup(self) -> CandidateLookup: + return self.candidate, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + def __str__(self) -> str: + return str(self._ireq.req) + + def __repr__(self) -> str: + return 
f"{self.__class__.__name__}({str(self._ireq.req)!r})" + + @property + def project_name(self) -> NormalizedName: + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + return canonicalize_name(self._ireq.req.name) + + @property + def name(self) -> str: + return format_name(self.project_name, self._extras) + + def format_for_error(self) -> str: + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self) -> CandidateLookup: + return None, self._ireq + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self.name, ( + f"Internal issue: Candidate is not for this requirement " + f"{candidate.name} vs {self.name}" + ) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class SpecifierWithoutExtrasRequirement(SpecifierRequirement): + """ + Requirement backed by an install requirement on a base package. + Trims extras from its install requirement if there are any. + """ + + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = install_req_drop_extras(ireq) + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata.""" + + def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: + self.specifier = specifier + self._candidate = match + + def __str__(self) -> str: + return f"Python {self.specifier}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self.specifier)!r})" + + @property + def project_name(self) -> NormalizedName: + return self._candidate.project_name + + @property + def name(self) -> str: + return self._candidate.name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class UnsatisfiableRequirement(Requirement): + """A requirement that cannot be satisfied.""" + + def __init__(self, name: NormalizedName) -> None: + self._name = name + + def __str__(self) -> str: + return f"{self._name} (unavailable)" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self._name)!r})" + + @property + def project_name(self) -> NormalizedName: + return self._name + + @property + def name(self) -> str: + return self._name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 0000000..c12beef --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,317 @@ +import contextlib +import functools +import logging +import os +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver +from pip._vendor.resolvelib.structs import DirectedGraph + +from pip._internal.cache import WheelCache +from pip._internal.index.package_finder import PackageFinder +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_extend_extras +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.resolution.resolvelib.reporter import ( + PipDebuggingReporter, + PipReporter, +) +from pip._internal.utils.packaging import get_requirement + +from .base import Candidate, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.resolvers import Result as RLResult + + Result = RLResult[Requirement, Candidate, str] + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, + ): + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result: Optional[Result] = None + + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + 
collected = self.factory.collect_root_requirements(root_reqs)
+        provider = PipProvider(
+            factory=self.factory,
+            constraints=collected.constraints,
+            ignore_dependencies=self.ignore_dependencies,
+            upgrade_strategy=self.upgrade_strategy,
+            user_requested=collected.user_requested,
+        )
+        if "PIP_RESOLVER_DEBUG" in os.environ:
+            reporter: BaseReporter = PipDebuggingReporter()
+        else:
+            reporter = PipReporter()
+        resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
+            provider,
+            reporter,
+        )
+
+        try:
+            limit_how_complex_resolution_can_be = 200000
+            result = self._result = resolver.resolve(
+                collected.requirements, max_rounds=limit_how_complex_resolution_can_be
+            )
+
+        except ResolutionImpossible as e:
+            error = self.factory.get_installation_error(
+                cast("ResolutionImpossible[Requirement, Candidate]", e),
+                collected.constraints,
+            )
+            raise error from e
+
+        req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        # Process candidates with extras last to ensure their base equivalent is
+        # already in the req_set if appropriate.
+        # Python's sort is stable so using a binary key function keeps relative order
+        # within both subsets.
+        for candidate in sorted(
+            result.mapping.values(), key=lambda c: c.name != c.project_name
+        ):
+            ireq = candidate.get_install_requirement()
+            if ireq is None:
+                if candidate.name != candidate.project_name:
+                    # extend existing req's extras
+                    with contextlib.suppress(KeyError):
+                        req = req_set.get_requirement(candidate.project_name)
+                        req_set.add_named_requirement(
+                            install_req_extend_extras(
+                                req, get_requirement(candidate.name).extras
+                            )
+                        )
+                continue
+
+            # Check if there is already an installation under the same name,
+            # and set a flag for later stages to uninstall it, if needed.
+            installed_dist = self.factory.get_dist_to_uninstall(candidate)
+            if installed_dist is None:
+                # There is no existing installation -- nothing to uninstall.
+                ireq.should_reinstall = False
+            elif self.factory.force_reinstall:
+                # The --force-reinstall flag is set -- reinstall.
+                ireq.should_reinstall = True
+            elif installed_dist.version != candidate.version:
+                # The installation is different in version -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.is_editable or installed_dist.editable:
+                # The incoming distribution is editable, or differs in
+                # editability from the installed one -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.source_link and candidate.source_link.is_file:
+                # The incoming distribution is under file://
+                if candidate.source_link.is_wheel:
+                    # is a local wheel -- do nothing.
+                    logger.info(
+                        "%s is already installed with the same version as the "
+                        "provided wheel. Use --force-reinstall to force an "
+                        "installation of the wheel.",
+                        ireq.name,
+                    )
+                    continue
+
+                # is a local sdist or path -- reinstall
+                ireq.should_reinstall = True
+            else:
+                continue
+
+            link = candidate.source_link
+            if link and link.is_yanked:
+                # The yanked reason may contain non-ASCII characters.
+                msg = (
+                    "The candidate selected for download or install is a "
+                    "yanked version: {name!r} candidate (version {version} "
+                    "at {link})\nReason for being yanked: {reason}"
+                ).format(
+                    name=candidate.name,
+                    version=candidate.version,
+                    link=link,
+                    reason=link.yanked_reason or "<none given>",
+                )
+                logger.warning(msg)
+
+            req_set.add_named_requirement(ireq)
+
+        reqs = req_set.all_requirements
+        self.factory.preparer.prepare_linked_requirements_more(reqs)
+        for req in reqs:
+            req.prepared = True
+            req.needs_more_preparation = False
+        return req_set
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        """Get order for installation of requirements in RequirementSet.
+
+        The returned list contains a requirement before another that depends on
+        it. This helps ensure that the environment is kept consistent as they
+        get installed one-by-one.
+
+        The current implementation creates a topological ordering of the
+        dependency graph, giving more weight to packages with fewer
+        or no dependencies, while breaking any cycles in the graph at
+        arbitrary points. We make no guarantees about where the cycle
+        would be broken, other than it *would* be broken.
+        """
+        assert self._result is not None, "must call resolve() first"
+
+        if not req_set.requirements:
+            # Nothing is left to install, so we do not need an order.
+            return []
+
+        graph = self._result.graph
+        weights = get_topological_weights(graph, set(req_set.requirements.keys()))
+
+        sorted_items = sorted(
+            req_set.requirements.items(),
+            key=functools.partial(_req_set_item_sorter, weights=weights),
+            reverse=True,
+        )
+        return [ireq for _, ireq in sorted_items]
+
+
+def get_topological_weights(
+    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
+) -> Dict[Optional[str], int]:
+    """Assign weights to each node based on how "deep" they are.
+
+    This implementation may change at any point in the future without prior
+    notice.
+
+    We first simplify the dependency graph by pruning any leaves and giving them
+    the highest weight: a package without any dependencies should be installed
+    first. This is done again and again in the same way, giving ever less weight
+    to the newly found leaves. The loop stops when no leaves are left: all
+    remaining packages have at least one dependency left in the graph.
+
+    Then we continue with the remaining graph, by taking the length of the
+    longest path from the root to each node, ignoring any paths that contain
+    a single node twice (i.e. cycles). This is done through a depth-first
+    search through the graph, while keeping track of the path to the node.
+
+    A cycle in the graph would result in a node being revisited while it is
+    also on its own path. In that case, we take no action, which ensures we
+    don't get stuck in the cycle.
+
+    When assigning weight, the longer path (i.e. larger length) is preferred.
+
+    We are only interested in the weights of packages that are in the
+    requirement_keys.
+    """
+    path: Set[Optional[str]] = set()
+    weights: Dict[Optional[str], int] = {}
+
+    def visit(node: Optional[str]) -> None:
+        if node in path:
+            # We hit a cycle, so we'll break it here.
+            return
+
+        # Time to visit the children!
+        path.add(node)
+        for child in graph.iter_children(node):
+            visit(child)
+        path.remove(node)
+
+        if node not in requirement_keys:
+            return
+
+        last_known_parent_count = weights.get(node, 0)
+        weights[node] = max(last_known_parent_count, len(path))
+
+    # Simplify the graph, pruning leaves that have no dependencies.
+    # This is needed for large graphs (say over 200 packages) because the
+    # `visit` function is exponentially slower in that case, taking minutes.
+    # See https://github.com/pypa/pip/issues/10557
+    # We will loop until we explicitly break the loop.
+    while True:
+        leaves = set()
+        for key in graph:
+            if key is None:
+                continue
+            for _child in graph.iter_children(key):
+                # This means we have at least one child
+                break
+            else:
+                # No child.
+                leaves.add(key)
+        if not leaves:
+            # We are done simplifying.
+            break
+        # Calculate the weight for the leaves.
+        weight = len(graph) - 1
+        for leaf in leaves:
+            if leaf not in requirement_keys:
+                continue
+            weights[leaf] = weight
+        # Remove the leaves from the graph, making it simpler.
+        for leaf in leaves:
+            graph.remove(leaf)
+
+    # Visit the remaining graph.
+    # `None` is guaranteed to be the root node by resolvelib.
+    visit(None)
+
+    # Sanity check: all requirement keys should be in the weights,
+    # and no other keys should be in the weights.
+    difference = set(weights.keys()).difference(requirement_keys)
+    assert not difference, difference
+
+    return weights
+
+
+def _req_set_item_sorter(
+    item: Tuple[str, InstallRequirement],
+    weights: Dict[Optional[str], int],
+) -> Tuple[int, str]:
+    """Key function used to sort install requirements for installation.
+
+    Based on the "weight" mapping calculated in ``get_installation_order()``.
+    The canonical package name is returned as the second member as a tie-
+    breaker to ensure the result is predictable, which is useful in tests.
+    """
+    name = canonicalize_name(item[0])
+    return weights[name], name
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py b/myenv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000..0f64ae0
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,248 @@
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os.path
+import sys
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.rich.console import Group
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.entrypoints import (
+    get_best_invocation_for_this_pip,
+    get_best_invocation_for_this_python,
+)
+from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
+from pip._internal.utils.misc import ensure_dir
+
+_WEEK = datetime.timedelta(days=7)
+
+logger = logging.getLogger(__name__)
+
+
+def _get_statefile_name(key: str) -> str:
+    key_bytes = key.encode()
+    name = hashlib.sha224(key_bytes).hexdigest()
+    return name
+
+
+def _convert_date(isodate: str) -> datetime.datetime:
+    """Convert an ISO format string to a datetime.
+
+    Handles the format 2020-01-22T14:24:01Z (trailing Z)
+    which is not supported by older versions of fromisoformat.
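The trailing-Z workaround described above is tiny but easy to get wrong, so here is a standalone check. (Python 3.11+ parses the `Z` suffix natively, but pip supports older interpreters.)

    import datetime

    isodate = "2020-01-22T14:24:01Z"
    parsed = datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
    print(parsed)         # -> 2020-01-22 14:24:01+00:00
    print(parsed.tzinfo)  # -> UTC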
+ """ + return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00")) + + +class SelfCheckState: + def __init__(self, cache_dir: str) -> None: + self._state: Dict[str, Any] = {} + self._statefile_path = None + + # Try to load the existing state + if cache_dir: + self._statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) + try: + with open(self._statefile_path, encoding="utf-8") as statefile: + self._state = json.load(statefile) + except (OSError, ValueError, KeyError): + # Explicitly suppressing exceptions, since we don't want to + # error out if the cache file is invalid. + pass + + @property + def key(self) -> str: + return sys.prefix + + def get(self, current_time: datetime.datetime) -> Optional[str]: + """Check if we have a not-outdated version loaded already.""" + if not self._state: + return None + + if "last_check" not in self._state: + return None + + if "pypi_version" not in self._state: + return None + + # Determine if we need to refresh the state + last_check = _convert_date(self._state["last_check"]) + time_since_last_check = current_time - last_check + if time_since_last_check > _WEEK: + return None + + return self._state["pypi_version"] + + def set(self, pypi_version: str, current_time: datetime.datetime) -> None: + # If we do not have a path to cache in, don't bother saving. + if not self._statefile_path: + return + + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self._statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(os.path.dirname(self._statefile_path)) + + state = { + # Include the key so it's easy to tell which pip wrote the + # file. + "key": self.key, + "last_check": current_time.isoformat(), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) + + with adjacent_tmp_file(self._statefile_path) as f: + f.write(text.encode()) + + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self._statefile_path) + except OSError: + # Best effort. + pass + + +@dataclass +class UpgradePrompt: + old: str + new: str + + def __rich__(self) -> Group: + if WINDOWS: + pip_cmd = f"{get_best_invocation_for_this_python()} -m pip" + else: + pip_cmd = get_best_invocation_for_this_pip() + + notice = "[bold][[reset][blue]notice[reset][bold]][reset]" + return Group( + Text(), + Text.from_markup( + f"{notice} A new release of pip is available: " + f"[red]{self.old}[reset] -> [green]{self.new}[reset]" + ), + Text.from_markup( + f"{notice} To update, run: " + f"[green]{escape(pip_cmd)} install --upgrade pip" + ), + ) + + +def was_installed_by_pip(pkg: str) -> bool: + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. + """ + dist = get_default_environment().get_distribution(pkg) + return dist is not None and "pip" == dist.installer + + +def _get_current_remote_pip_version( + session: PipSession, options: optparse.Values +) -> Optional[str]: + # Lets use PackageFinder to see what the latest pip version is + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. 
+ selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=False, # Explicitly set to False + ) + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return None + + return str(best_candidate.version) + + +def _self_version_check_logic( + *, + state: SelfCheckState, + current_time: datetime.datetime, + local_version: DistributionVersion, + get_remote_version: Callable[[], Optional[str]], +) -> Optional[UpgradePrompt]: + remote_version_str = state.get(current_time) + if remote_version_str is None: + remote_version_str = get_remote_version() + if remote_version_str is None: + logger.debug("No remote pip version found") + return None + state.set(remote_version_str, current_time) + + remote_version = parse_version(remote_version_str) + logger.debug("Remote version of pip: %s", remote_version) + logger.debug("Local version of pip: %s", local_version) + + pip_installed_by_pip = was_installed_by_pip("pip") + logger.debug("Was pip installed by pip? %s", pip_installed_by_pip) + if not pip_installed_by_pip: + return None # Only suggest upgrade if pip is installed by pip. + + local_version_is_older = ( + local_version < remote_version + and local_version.base_version != remote_version.base_version + ) + if local_version_is_older: + return UpgradePrompt(old=str(local_version), new=remote_version_str) + + return None + + +def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. 
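As an aside, the base-version guard in `_self_version_check_logic` above prevents prompting when only the pre/dev segment differs. An illustrative check using the standalone `packaging` library (which mirrors the vendored copy):

    from packaging.version import parse

    def worth_prompting(local: str, remote: str) -> bool:
        lv, rv = parse(local), parse(remote)
        # Prompt only when the release (base) version really differs.
        return lv < rv and lv.base_version != rv.base_version

    print(worth_prompting("24.0", "24.1"))       # -> True
    print(worth_prompting("24.1.dev0", "24.1"))  # -> False: same base version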
+ """ + installed_dist = get_default_environment().get_distribution("pip") + if not installed_dist: + return + + try: + upgrade_prompt = _self_version_check_logic( + state=SelfCheckState(cache_dir=options.cache_dir), + current_time=datetime.datetime.now(datetime.timezone.utc), + local_version=installed_dist.version, + get_remote_version=functools.partial( + _get_current_remote_pip_version, session, options + ), + ) + if upgrade_prompt is not None: + logger.warning("%s", upgrade_prompt, extra={"rich": True}) + except Exception: + logger.warning("There was an error checking the latest version of pip.") + logger.debug("See below for error", exc_info=True) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1b6004e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc new file mode 100644 index 0000000..c083d4e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc new file mode 100644 index 0000000..410d2bb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc new file mode 100644 index 0000000..bb5fc61 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..1fefda6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc new file mode 100644 index 0000000..9ccd87f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc new file mode 100644 index 0000000..e75f7dc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc new file mode 100644 index 0000000..6a158f2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc new file mode 100644 index 0000000..84104a5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc new file mode 100644 index 0000000..c3a10b3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-312.pyc new file mode 100644 index 0000000..39a1521 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc new file mode 100644 index 0000000..d3da102 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc new file mode 100644 index 0000000..ecd430d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc new file mode 100644 index 0000000..6970fe1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc new file mode 100644 index 0000000..9aa204d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc new file mode 100644 index 0000000..6c26dcf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc new file mode 100644 index 0000000..e793af9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..9b4ad1c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..aacfd83 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc new file mode 100644 index 0000000..84abda6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc new file mode 100644 index 0000000..148173e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc new file mode 100644 index 0000000..cbde304 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc new file mode 100644 index 0000000..86a4305 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc new file mode 100644 index 0000000..c7cf082 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc new file mode 100644 index 0000000..a32f08b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc new file mode 100644 index 0000000..4fcca38 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..b0acb47 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py new file mode 100644 index 0000000..e06947c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py @@ -0,0 +1,109 @@ +"""Functions brought over from jaraco.text. + +These functions are not supposed to be used within `pip._internal`. These are +helper functions brought over from `jaraco.text` to enable vendoring newer +copies of `pkg_resources` without having to vendor `jaraco.text` and its entire +dependency cone; something that our vendoring setup is not currently capable of +handling. + +License reproduced from original source below: + +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +import functools +import itertools + + +def _nonblank(str): + return str and not str.startswith("#") + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. + + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(" #")[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. 
+
+    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+    ['foobar', 'baz']
+    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+    ['foobar', 'baz']
+    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
+    ['foobarbaz']
+
+    Not sure why, but...
+    The character preceding the backslash is also elided.
+
+    >>> list(join_continuation(['goo\\', 'dly']))
+    ['godly']
+
+    A terrible idea, but...
+    If no line is available to continue, suppress the lines.
+
+    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
+    ['foo']
+    """
+    lines = iter(lines)
+    for item in lines:
+        while item.endswith("\\"):
+            try:
+                item = item[:-2].strip() + next(lines)
+            except StopIteration:
+                return
+        yield item
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/_log.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/_log.py
new file mode 100644
index 0000000..92c4c6a
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/_log.py
@@ -0,0 +1,38 @@
+"""Customize logging
+
+Defines custom logger class for the `logger.verbose(...)` method.
+
+init_logging() must be called before any other modules that call logging.getLogger.
+"""
+
+import logging
+from typing import Any, cast
+
+# custom log level for `--verbose` output
+# between DEBUG and INFO
+VERBOSE = 15
+
+
+class VerboseLogger(logging.Logger):
+    """Custom Logger, defining a verbose log-level
+
+    VERBOSE is between INFO and DEBUG.
+    """
+
+    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
+        return self.log(VERBOSE, msg, *args, **kwargs)
+
+
+def getLogger(name: str) -> VerboseLogger:
+    """logging.getLogger, but ensures our VerboseLogger class is returned"""
+    return cast(VerboseLogger, logging.getLogger(name))
+
+
+def init_logging() -> None:
+    """Register our VerboseLogger and VERBOSE log level.
+
+    Should be called before any calls to getLogger(),
+    i.e. in pip._internal.__init__
+    """
+    logging.setLoggerClass(VerboseLogger)
+    logging.addLevelName(VERBOSE, "VERBOSE")
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000..16933bf
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,52 @@
+"""
+This code wraps the vendored appdirs module so the return values are
+compatible for the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests
+passing, and eventually drop this after all usages are changed.
+"""
+
+import os
+import sys
+from typing import List
+
+from pip._vendor import platformdirs as _appdirs
+
+
+def user_cache_dir(appname: str) -> str:
+    return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
+    # Use ~/Application Support/pip, if the directory exists.
+    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+    if os.path.isdir(path):
+        return path
+
+    # Use a Linux-like ~/.config/pip, by default.
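The `_log` module just added registers a VERBOSE level between DEBUG and INFO plus a Logger subclass exposing it. A minimal standalone sketch of the same pattern (the `demo` logger name is illustrative):

```python
import logging

VERBOSE = 15  # sits between DEBUG (10) and INFO (20)


class VerboseLogger(logging.Logger):
    def verbose(self, msg, *args, **kwargs):
        return self.log(VERBOSE, msg, *args, **kwargs)


# Register the class and level name before any getLogger() calls,
# as init_logging() above requires.
logging.setLoggerClass(VerboseLogger)
logging.addLevelName(VERBOSE, "VERBOSE")

logging.basicConfig(level=VERBOSE)
logging.getLogger("demo").verbose("shown at VERBOSE, hidden at INFO")
```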
+ linux_like_path = "~/.config/" + if appname: + linux_like_path = os.path.join(linux_like_path, appname) + + return os.path.expanduser(linux_like_path) + + +def user_config_dir(appname: str, roaming: bool = True) -> str: + if sys.platform == "darwin": + return _macos_user_config_dir(appname, roaming) + + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dirs(appname: str) -> List[str]: + if sys.platform == "darwin": + return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)] + + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if sys.platform == "win32": + return [dirval] + + # Unix-y system. Look in /etc as well. + return dirval.split(os.pathsep) + ["/etc"] diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/compat.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 0000000..3f4d300 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,63 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import logging +import os +import sys + +__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] + + +logger = logging.getLogger(__name__) + + +def has_tls() -> bool: + try: + import _ssl # noqa: F401 # ignore unused + + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + + return IS_PYOPENSSL + + +def get_path_uid(path: str) -> int: + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, "O_NOFOLLOW"): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + return file_uid + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 0000000..b6ed9a7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,165 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
+""" + +import re +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import ( + PythonVersion, + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") + + +def version_info_to_nodot(version_info: Tuple[int, ...]) -> str: + # Only use up to the first two numbers. + return "".join(map(str, version_info[:2])) + + +def _mac_platforms(arch: str) -> List[str]: + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("macosx_") :]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch: str) -> List[str]: + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch_prefix == "manylinux2014": + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {"i686", "x86_64"}: + arches.append("manylinux2010" + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) + elif arch_prefix == "manylinux2010": + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append("manylinux1" + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch: str) -> List[str]: + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): + arches = _mac_platforms(arch) + elif arch_prefix in ["manylinux2014", "manylinux2010"]: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]: + if not platforms: + return None + + seen = set() + result = [] + + for p in platforms: + if p in seen: + continue + additions = [c for c in _get_custom_platforms(p) if c not in seen] + seen.update(additions) + result.extend(additions) + + return result + + +def _get_python_version(version: str) -> PythonVersion: + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter( + implementation: Optional[str] = None, version: Optional[str] = None +) -> str: + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return f"{implementation}{version}" + + +def get_supported( + version: Optional[str] = None, + platforms: Optional[List[str]] = None, + impl: Optional[str] = None, + abis: Optional[List[str]] = None, +) -> List[Tag]: + """Return a list of supported tags for each version specified in + `versions`. 
+
+    :param version: a string version, of the form "33" or "32",
+        or None. The version will be assumed to support our ABI.
+    :param platforms: specify a list of platforms you want valid
+        tags for, or None. If None, use the local system platform.
+    :param impl: specify the exact implementation you want valid
+        tags for, or None. If None, use the local interpreter impl.
+    :param abis: specify a list of abis you want valid
+        tags for, or None. If None, use the local interpreter abi.
+    """
+    supported: List[Tag] = []
+
+    python_version: Optional[PythonVersion] = None
+    if version is not None:
+        python_version = _get_python_version(version)
+
+    interpreter = _get_custom_interpreter(impl, version)
+
+    platforms = _expand_allowed_platforms(platforms)
+
+    is_cpython = (impl or interpreter_name()) == "cp"
+    if is_cpython:
+        supported.extend(
+            cpython_tags(
+                python_version=python_version,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    else:
+        supported.extend(
+            generic_tags(
+                interpreter=interpreter,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    supported.extend(
+        compatible_tags(
+            python_version=python_version,
+            interpreter=interpreter,
+            platforms=platforms,
+        )
+    )
+
+    return supported
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py
new file mode 100644
index 0000000..8668b3b
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py
@@ -0,0 +1,11 @@
+"""For when pip wants to check the date or time.
+"""
+
+import datetime
+
+
+def today_is_later_than(year: int, month: int, day: int) -> bool:
+    today = datetime.date.today()
+    given = datetime.date(year, month, day)
+
+    return today > given
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000..72bd6f2
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,120 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+import logging
+import warnings
+from typing import Any, Optional, TextIO, Type, Union
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version  # NOTE: tests patch this name.
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+    pass
+
+
+_original_showwarning: Any = None
+
+
+# Warnings <-> Logging Integration
+def _showwarning(
+    message: Union[Warning, str],
+    category: Type[Warning],
+    filename: str,
+    lineno: int,
+    file: Optional[TextIO] = None,
+    line: Optional[str] = None,
+) -> None:
+    if file is not None:
+        if _original_showwarning is not None:
+            _original_showwarning(message, category, filename, lineno, file, line)
+    elif issubclass(category, PipDeprecationWarning):
+        # We use a specially named logger which will handle all of the
+        # deprecation messages for pip.
+        logger = logging.getLogger("pip._internal.deprecations")
+        logger.warning(message)
+    else:
+        _original_showwarning(message, category, filename, lineno, file, line)
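`_showwarning` routes pip's own deprecation warnings into the logging system and hands everything else back to the interpreter default. A self-contained sketch of that interception pattern (the warning class and logger name are illustrative, not pip's):

```python
import logging
import warnings


class DemoDeprecationWarning(Warning):
    pass


logging.basicConfig(level=logging.INFO)
_original = warnings.showwarning


def _intercept(message, category, filename, lineno, file=None, line=None):
    if file is None and issubclass(category, DemoDeprecationWarning):
        # Route our own warnings through a dedicated logger.
        logging.getLogger("demo.deprecations").warning(message)
    else:
        _original(message, category, filename, lineno, file, line)


warnings.simplefilter("default", DemoDeprecationWarning, append=True)
warnings.showwarning = _intercept
warnings.warn("old flag is going away", DemoDeprecationWarning, stacklevel=2)
```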
+ logger = logging.getLogger("pip._internal.deprecations") + logger.warning(message) + else: + _original_showwarning(message, category, filename, lineno, file, line) + + +def install_warning_logger() -> None: + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _original_showwarning + + if _original_showwarning is None: + _original_showwarning = warnings.showwarning + warnings.showwarning = _showwarning + + +def deprecated( + *, + reason: str, + replacement: Optional[str], + gone_in: Optional[str], + feature_flag: Optional[str] = None, + issue: Optional[int] = None, +) -> None: + """Helper to deprecate existing functionality. + + reason: + Textual reason shown to the user about why this functionality has + been deprecated. Should be a complete sentence. + replacement: + Textual suggestion shown to the user about what alternative + functionality they can use. + gone_in: + The version of pip does this functionality should get removed in. + Raises an error if pip's current version is greater than or equal to + this. + feature_flag: + Command-line flag of the form --use-feature={feature_flag} for testing + upcoming functionality. + issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + """ + + # Determine whether or not the feature is already gone in this version. + is_gone = gone_in is not None and parse(current_version) >= parse(gone_in) + + message_parts = [ + (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"), + ( + gone_in, + "pip {} will enforce this behaviour change." + if not is_gone + else "Since pip {}, this is no longer supported.", + ), + ( + replacement, + "A possible replacement is {}.", + ), + ( + feature_flag, + "You can use the flag --use-feature={} to test the upcoming behaviour." + if not is_gone + else None, + ), + ( + issue, + "Discussion can be found at https://github.com/pypa/pip/issues/{}", + ), + ] + + message = " ".join( + format_str.format(value) + for value, format_str in message_parts + if format_str is not None and value is not None + ) + + # Raise as an error if this behaviour is deprecated. 
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py
new file mode 100644
index 0000000..0e8e5e1
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py
@@ -0,0 +1,87 @@
+from typing import Optional
+
+from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+
+def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
+    """Convert a DirectUrl to a pip requirement string."""
+    direct_url.validate()  # if invalid, this is a pip bug
+    requirement = name + " @ "
+    fragments = []
+    if isinstance(direct_url.info, VcsInfo):
+        requirement += "{}+{}@{}".format(
+            direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
+        )
+    elif isinstance(direct_url.info, ArchiveInfo):
+        requirement += direct_url.url
+        if direct_url.info.hash:
+            fragments.append(direct_url.info.hash)
+    else:
+        assert isinstance(direct_url.info, DirInfo)
+        requirement += direct_url.url
+    if direct_url.subdirectory:
+        fragments.append("subdirectory=" + direct_url.subdirectory)
+    if fragments:
+        requirement += "#" + "&".join(fragments)
+    return requirement
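The strings assembled above take the PEP 440 direct-reference forms; roughly (values illustrative):

```python
# VCS reference: name @ vcs+url@commit_id
"pip @ git+https://github.com/pypa/pip@0123456789abcdef0123456789abcdef01234567"
# Archive with a hash fragment appended:
"pkg @ https://example.com/pkg-1.0.tar.gz#sha256=..."
# Local directory with a subdirectory fragment:
"pkg @ file:///home/user/src/pkg#subdirectory=subpkg"
```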
+
+
+def direct_url_for_editable(source_dir: str) -> DirectUrl:
+    return DirectUrl(
+        url=path_to_url(source_dir),
+        info=DirInfo(editable=True),
+    )
+
+
+def direct_url_from_link(
+    link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
+) -> DirectUrl:
+    if link.is_vcs:
+        vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+        assert vcs_backend
+        url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
+            link.url_without_fragment
+        )
+        # For VCS links, we need to find out and add commit_id.
+        if link_is_in_wheel_cache:
+            # If the requested VCS link corresponds to a cached
+            # wheel, it means the requested revision was an
+            # immutable commit hash, otherwise it would not have
+            # been cached. In that case we don't have a source_dir
+            # with the VCS checkout.
+            assert requested_revision
+            commit_id = requested_revision
+        else:
+            # If the wheel was not in cache, it means we have
+            # had to checkout from VCS to build and we have a source_dir
+            # which we can inspect to find out the commit id.
+            assert source_dir
+            commit_id = vcs_backend.get_revision(source_dir)
+        return DirectUrl(
+            url=url,
+            info=VcsInfo(
+                vcs=vcs_backend.name,
+                commit_id=commit_id,
+                requested_revision=requested_revision,
+            ),
+            subdirectory=link.subdirectory_fragment,
+        )
+    elif link.is_existing_dir():
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=DirInfo(),
+            subdirectory=link.subdirectory_fragment,
+        )
+    else:
+        hash = None
+        hash_name = link.hash_name
+        if hash_name:
+            hash = f"{hash_name}={link.hash}"
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=ArchiveInfo(hash=hash),
+            subdirectory=link.subdirectory_fragment,
+        )
diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py
new file mode 100644
index 0000000..4a384a6
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py
@@ -0,0 +1,80 @@
+import os
+import re
+import sys
+from typing import List, Optional
+
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.virtualenv import (
+    running_under_virtualenv,
+    virtualenv_no_global,
+)
+
+__all__ = [
+    "egg_link_path_from_sys_path",
+    "egg_link_path_from_location",
+]
+
+
+def _egg_link_names(raw_name: str) -> List[str]:
+    """
+    Convert a Name metadata value to a .egg-link name, by applying
+    the same substitution as pkg_resources's safe_name function.
+    Note: we cannot use canonicalize_name because it has a different logic.
+
+    We also look for the raw name (without normalization) as setuptools 69 changed
+    the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).
+    """
+    return [
+        re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",
+        f"{raw_name}.egg-link",
+    ]
+
+
+def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
+    """
+    Look for a .egg-link file for project name, by walking sys.path.
+    """
+    egg_link_names = _egg_link_names(raw_name)
+    for path_item in sys.path:
+        for egg_link_name in egg_link_names:
+            egg_link = os.path.join(path_item, egg_link_name)
+            if os.path.isfile(egg_link):
+                return egg_link
+    return None
+
+
+def egg_link_path_from_location(raw_name: str) -> Optional[str]:
+    """
+    Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are 3 scenarios:
+    1) not in a virtualenv
+       try to find in site.USER_SITE, then site_packages
+    2) in a no-global virtualenv
+       try to find in site_packages
+    3) in a yes-global virtualenv
+       try to find in site_packages, then site.USER_SITE
+       (don't look in global location)
+
+    For #1 and #3, there could be odd cases, where there's an egg-link in 2
+    locations.
+
+    This method will just return the first one found.
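The safe-name substitution in `_egg_link_names` above collapses every run of characters outside `[A-Za-z0-9.]` to a single dash, so both candidate filenames below get probed:

```python
import re

raw_name = "my_project.plugin"  # illustrative Name metadata value
print(re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link")  # my-project.plugin.egg-link
print(f"{raw_name}.egg-link")                                 # my_project.plugin.egg-link
```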
+ """ + sites: List[str] = [] + if running_under_virtualenv(): + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + egg_link_names = _egg_link_names(raw_name) + for site in sites: + for egg_link_name in egg_link_names: + egglink = os.path.join(site, egg_link_name) + if os.path.isfile(egglink): + return egglink + return None diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py new file mode 100644 index 0000000..008f06a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/encoding.py @@ -0,0 +1,36 @@ +import codecs +import locale +import re +import sys +from typing import List, Tuple + +BOMS: List[Tuple[bytes, str]] = [ + (codecs.BOM_UTF8, "utf-8"), + (codecs.BOM_UTF16, "utf-16"), + (codecs.BOM_UTF16_BE, "utf-16-be"), + (codecs.BOM_UTF16_LE, "utf-16-le"), + (codecs.BOM_UTF32, "utf-32"), + (codecs.BOM_UTF32_BE, "utf-32-be"), + (codecs.BOM_UTF32_LE, "utf-32-le"), +] + +ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)") + + +def auto_decode(data: bytes) -> str: + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom) :].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b"\n")[:2]: + if line[0:1] == b"#" and ENCODING_RE.search(line): + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode("ascii") + return data.decode(encoding) + return data.decode( + locale.getpreferredencoding(False) or sys.getdefaultencoding(), + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 0000000..1501369 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,84 @@ +import itertools +import os +import shutil +import sys +from typing import List, Optional + +from pip._internal.cli.main import main +from pip._internal.utils.compat import WINDOWS + +_EXECUTABLE_NAMES = [ + "pip", + f"pip{sys.version_info.major}", + f"pip{sys.version_info.major}.{sys.version_info.minor}", +] +if WINDOWS: + _allowed_extensions = {"", ".exe"} + _EXECUTABLE_NAMES = [ + "".join(parts) + for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions) + ] + + +def _wrapper(args: Optional[List[str]] = None) -> int: + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. 
This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) + + +def get_best_invocation_for_this_pip() -> str: + """Try to figure out the best way to invoke pip in the current environment.""" + binary_directory = "Scripts" if WINDOWS else "bin" + binary_prefix = os.path.join(sys.prefix, binary_directory) + + # Try to use pip[X[.Y]] names, if those executables for this environment are + # the first on PATH with that name. + path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep) + exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts + if exe_are_in_PATH: + for exe_name in _EXECUTABLE_NAMES: + found_executable = shutil.which(exe_name) + binary_executable = os.path.join(binary_prefix, exe_name) + if ( + found_executable + and os.path.exists(binary_executable) + and os.path.samefile( + found_executable, + binary_executable, + ) + ): + return exe_name + + # Use the `-m` invocation, if there's no "nice" invocation. + return f"{get_best_invocation_for_this_python()} -m pip" + + +def get_best_invocation_for_this_python() -> str: + """Try to figure out the best way to invoke the current Python.""" + exe = sys.executable + exe_name = os.path.basename(exe) + + # Try to use the basename, if it's the first executable. + found_executable = shutil.which(exe_name) + if found_executable and os.path.samefile(found_executable, exe): + return exe_name + + # Use the full executable name, because we couldn't find something simpler. + return exe diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..83c2df7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,153 @@ +import fnmatch +import os +import os.path +import random +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, BinaryIO, Generator, List, Union, cast + +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size + + +def check_path_owner(path: str) -> bool: + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +@contextmanager +def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. 
+ + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. + """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix=".tmp", + **kwargs, + ) as f: + result = cast(BinaryIO, f) + try: + yield result + finally: + result.flush() + os.fsync(result.fileno()) + + +# Tenacity raises RetryError by default, explicitly raise the original exception +_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)) + +replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path: str) -> bool: + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == "posix": + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path: str) -> bool: + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = "accesstest_deleteme_fishfingers_custard_" + alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + for _ in range(10): + name = basename + "".join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except FileExistsError: + pass + except PermissionError: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise OSError("Unexpected condition testing for writable directory") + + +def find_files(path: str, pattern: str) -> List[str]: + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result: List[str] = [] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path: str) -> Union[int, float]: + # If it's a symlink, return 0. + if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path: str) -> str: + return format_size(file_size(path)) + + +def directory_size(path: str) -> Union[int, float]: + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path: str) -> str: + return format_size(directory_size(path)) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 0000000..5948570 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,27 @@ +"""Filetype information. 
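`adjacent_tmp_file` plus the retried `replace` defined above support an atomic-write pattern: write beside the destination, fsync, then rename over it. A minimal sketch without the tenacity retry:

```python
import os
from tempfile import NamedTemporaryFile


def atomic_write(path: str, data: bytes) -> None:
    # The temp file lives in the destination directory so that
    # os.replace() stays on one filesystem and is atomic on POSIX.
    with NamedTemporaryFile(
        delete=False, dir=os.path.dirname(path) or ".", suffix=".tmp"
    ) as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    os.replace(f.name, path)


atomic_write("example.txt", b"hello\n")
```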
+""" + +from typing import Tuple + +from pip._internal.utils.misc import splitext + +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz") +XZ_EXTENSIONS: Tuple[str, ...] = ( + ".tar.xz", + ".txz", + ".tlz", + ".tar.lz", + ".tar.lzma", +) +ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION) +TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar") +ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS + + +def is_archive_file(name: str) -> bool: + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..81342af --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,88 @@ +import os +import sys +from typing import Optional, Tuple + + +def glibc_version_string() -> Optional[str]: + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr() -> Optional[str]: + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION") + if gnu_libc_version is None: + return None + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = gnu_libc_version.split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes() -> Optional[str]: + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. 
Solution: instead of using platform, use our code that actually +# works. +def libc_ver() -> Tuple[str, str]: + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. + """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..843cffc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,151 @@ +import hashlib +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional + +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError +from pip._internal.utils.misc import read_chunks + +if TYPE_CHECKING: + from hashlib import _Hash + + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = "sha256" + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ["sha256", "sha384", "sha512"] + + +class Hashes: + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + + def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None: + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + allowed = {} + if hashes is not None: + for alg, keys in hashes.items(): + # Make sure values are always sorted (to ease equality checks) + allowed[alg] = sorted(keys) + self._allowed = allowed + + def __and__(self, other: "Hashes") -> "Hashes": + if not isinstance(other, Hashes): + return NotImplemented + + # If either of the Hashes object is entirely empty (i.e. no hash + # specified at all), all hashes from the other object are allowed. + if not other: + return self + if not self: + return other + + # Otherwise only hashes that present in both objects are allowed. + new = {} + for alg, values in other._allowed.items(): + if alg not in self._allowed: + continue + new[alg] = [v for v in values if v in self._allowed[alg]] + return Hashes(new) + + @property + def digest_count(self) -> int: + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool: + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + + def check_against_chunks(self, chunks: Iterable[bytes]) -> None: + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. 
+ + """ + gots = {} + for hash_name in self._allowed.keys(): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError(f"Unknown hash name: {hash_name}") + + for chunk in chunks: + for hash in gots.values(): + hash.update(chunk) + + for hash_name, got in gots.items(): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file: BinaryIO) -> None: + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path: str) -> None: + with open(path, "rb") as file: + return self.check_against_file(file) + + def has_one_of(self, hashes: Dict[str, str]) -> bool: + """Return whether any of the given hashes are allowed.""" + for hash_name, hex_digest in hashes.items(): + if self.is_hash_allowed(hash_name, hex_digest): + return True + return False + + def __bool__(self) -> bool: + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Hashes): + return NotImplemented + return self._allowed == other._allowed + + def __hash__(self) -> int: + return hash( + ",".join( + sorted( + ":".join((alg, digest)) + for alg, digest_list in self._allowed.items() + for digest in digest_list + ) + ) + ) + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + + def __init__(self) -> None: + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super().__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/logging.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 0000000..95982df --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,348 @@ +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +import threading +from dataclasses import dataclass +from io import TextIOWrapper +from logging import Filter +from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type + +from pip._vendor.rich.console import ( + Console, + ConsoleOptions, + ConsoleRenderable, + RenderableType, + RenderResult, + RichCast, +) +from pip._vendor.rich.highlighter import NullHighlighter +from pip._vendor.rich.logging import RichHandler +from pip._vendor.rich.segment import Segment +from pip._vendor.rich.style import Style + +from pip._internal.utils._log import VERBOSE, getLogger +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +_log_state = threading.local() +subprocess_logger = getLogger("pip.subprocessor") + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. 
+ """ + + +def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool: + if exc_class is BrokenPipeError: + return True + + # On Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if not WINDOWS: + return False + + return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) + + +@contextlib.contextmanager +def indent_log(num: int = 2) -> Generator[None, None, None]: + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + # For thread-safety + _log_state.indentation = get_indentation() + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation() -> int: + return getattr(_log_state, "indentation", 0) + + +class IndentingFormatter(logging.Formatter): + default_time_format = "%Y-%m-%dT%H:%M:%S" + + def __init__( + self, + *args: Any, + add_timestamp: bool = False, + **kwargs: Any, + ) -> None: + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. + """ + self.add_timestamp = add_timestamp + super().__init__(*args, **kwargs) + + def get_message_start(self, formatted: str, levelno: int) -> str: + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return "" + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return "" + if levelno < logging.ERROR: + return "WARNING: " + + return "ERROR: " + + def format(self, record: logging.LogRecord) -> str: + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super().format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = "" + if self.add_timestamp: + prefix = f"{self.formatTime(record)} " + prefix += " " * get_indentation() + formatted = "".join([prefix + line for line in formatted.splitlines(True)]) + return formatted + + +@dataclass +class IndentedRenderable: + renderable: RenderableType + indent: int + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + segments = console.render(self.renderable, options) + lines = Segment.split_lines(segments) + for line in lines: + yield Segment(" " * self.indent) + yield from line + yield Segment("\n") + + +class RichPipStreamHandler(RichHandler): + KEYWORDS: ClassVar[Optional[List[str]]] = [] + + def __init__(self, stream: Optional[TextIO], no_color: bool) -> None: + super().__init__( + console=Console(file=stream, no_color=no_color, soft_wrap=True), + show_time=False, + show_level=False, + show_path=False, + highlighter=NullHighlighter(), + ) + + # Our custom override on Rich's logger, to make things work as we need them to. + def emit(self, record: logging.LogRecord) -> None: + style: Optional[Style] = None + + # If we are given a diagnostic error to present, present it with indentation. 
+ assert isinstance(record.args, tuple) + if getattr(record, "rich", False): + (rich_renderable,) = record.args + assert isinstance( + rich_renderable, (ConsoleRenderable, RichCast, str) + ), f"{rich_renderable} is not rich-console-renderable" + + renderable: RenderableType = IndentedRenderable( + rich_renderable, indent=get_indentation() + ) + else: + message = self.format(record) + renderable = self.render_message(record, message) + if record.levelno is not None: + if record.levelno >= logging.ERROR: + style = Style(color="red") + elif record.levelno >= logging.WARNING: + style = Style(color="yellow") + + try: + self.console.print(renderable, overflow="ignore", crop=False, style=style) + except Exception: + self.handleError(record) + + def handleError(self, record: logging.LogRecord) -> None: + """Called when logging is unable to log some output.""" + + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if ( + exc_class + and exc + and self.console.file is sys.stdout + and _is_broken_pipe_error(exc_class, exc) + ): + raise BrokenStdoutLoggingError() + + return super().handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + def _open(self) -> TextIOWrapper: + ensure_dir(os.path.dirname(self.baseFilename)) + return super()._open() + + +class MaxLevelFilter(Filter): + def __init__(self, level: int) -> None: + self.level = level + + def filter(self, record: logging.LogRecord) -> bool: + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record: logging.LogRecord) -> bool: + # The base Filter class allows only records from a logger (or its + # children). + return not super().filter(record) + + +def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int: + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 2: + level_number = logging.DEBUG + elif verbosity == 1: + level_number = VERBOSE + elif verbosity == -1: + level_number = logging.WARNING + elif verbosity == -2: + level_number = logging.ERROR + elif verbosity <= -3: + level_number = logging.CRITICAL + else: + level_number = logging.INFO + + level = logging.getLevelName(level_number) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
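The dictConfig assembled below pairs a max-level filter with stream handlers so records below WARNING go to stdout and the rest to stderr. The same split, reduced to a self-contained sketch:

```python
import logging
import logging.config


class MaxLevelFilter(logging.Filter):
    def __init__(self, level: int) -> None:
        super().__init__()
        self.level = level

    def filter(self, record: logging.LogRecord) -> bool:
        return record.levelno < self.level


logging.config.dictConfig(
    {
        "version": 1,
        "filters": {
            "below_warning": {"()": MaxLevelFilter, "level": logging.WARNING},
        },
        "handlers": {
            "out": {
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
                "filters": ["below_warning"],
            },
            "err": {
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stderr",
                "level": "WARNING",
            },
        },
        "root": {"level": "INFO", "handlers": ["out", "err"]},
    }
)
logging.getLogger("demo").info("goes to stdout")
logging.getLogger("demo").warning("goes to stderr")
```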
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.RichPipStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + + logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. + "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "stream": log_streams["stderr"], + "no_color": no_color, + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "encoding": "utf-8", + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": {"pip._vendor": {"level": vendored_log_level}}, + } + ) + + return level_number diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/misc.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 0000000..1ad3f61 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,783 @@ +import contextlib +import errno +import getpass +import hashlib +import io +import logging +import os +import posixpath +import shutil +import stat +import sys +import sysconfig +import urllib.parse +from functools import partial +from io import StringIO +from itertools import filterfalse, tee, zip_longest +from pathlib import Path +from types import FunctionType, TracebackType +from typing import ( + Any, + BinaryIO, + Callable, + ContextManager, + Dict, + Generator, + Iterable, + Iterator, + List, + Optional, + TextIO, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.pyproject_hooks import BuildBackendHookCaller +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip import __version__ +from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment +from pip._internal.locations import get_major_minor_version +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import 
running_under_virtualenv
+
+__all__ = [
+    "rmtree",
+    "display_path",
+    "backup_dir",
+    "ask",
+    "splitext",
+    "format_size",
+    "is_installable_dir",
+    "normalize_path",
+    "renames",
+    "get_prog",
+    "captured_stdout",
+    "ensure_dir",
+    "remove_auth_from_url",
+    "check_externally_managed",
+    "ConfiguredBuildBackendHookCaller",
+]
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
+VersionInfo = Tuple[int, int, int]
+NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+OnExc = Callable[[FunctionType, Path, BaseException], Any]
+OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
+
+
+def get_pip_version() -> str:
+    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+    pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+    return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
+
+
+def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
+    """
+    Convert a tuple of ints representing a Python version to one of length
+    three.
+
+    :param py_version_info: a tuple of ints representing a Python version,
+        or None to specify no version. The tuple can have any length.
+
+    :return: a tuple of length three if `py_version_info` is non-None.
+        Otherwise, return `py_version_info` unchanged (i.e. None).
+    """
+    if len(py_version_info) < 3:
+        py_version_info += (3 - len(py_version_info)) * (0,)
+    elif len(py_version_info) > 3:
+        py_version_info = py_version_info[:3]
+
+    return cast("VersionInfo", py_version_info)
+
+
+def ensure_dir(path: str) -> None:
+    """os.makedirs that does not raise if the directory already exists."""
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        # Windows can raise spurious ENOTEMPTY errors. See #6426.
+        if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
+            raise
+
+
+def get_prog() -> str:
+    try:
+        prog = os.path.basename(sys.argv[0])
+        if prog in ("__main__.py", "-c"):
+            return f"{sys.executable} -m pip"
+        else:
+            return prog
+    except (AttributeError, TypeError, IndexError):
+        pass
+    return "pip"
+
+
+# Retry every half second for up to 3 seconds
+# Tenacity raises RetryError by default, explicitly raise the original exception
+@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
+def rmtree(
+    dir: str,
+    ignore_errors: bool = False,
+    onexc: Optional[OnExc] = None,
+) -> None:
+    if ignore_errors:
+        onexc = _onerror_ignore
+    if onexc is None:
+        onexc = _onerror_reraise
+    handler: OnErr = partial(
+        # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
+        # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
+        cast(Union[OnExc, OnErr], rmtree_errorhandler),
+        onexc=onexc,
+    )
+    if sys.version_info >= (3, 12):
+        # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
+        shutil.rmtree(dir, onexc=handler)  # type: ignore
+    else:
+        shutil.rmtree(dir, onerror=handler)  # type: ignore
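`rmtree` above is wrapped with tenacity so transient failures (e.g. Windows file locking) are retried for up to three seconds before the original exception is re-raised. The decorator pattern on its own, assuming tenacity is available:

```python
import os

from tenacity import retry, stop_after_delay, wait_fixed


@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
def flaky_unlink(path: str) -> None:
    # Retried every 0.5s for up to 3s; reraise=True means the last
    # exception propagates unchanged instead of a RetryError.
    os.unlink(path)
```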
+ """ + try: + st_mode = os.stat(path).st_mode + except OSError: + # it's equivalent to os.path.exists + return + + if not st_mode & stat.S_IWRITE: + # convert to read/write + try: + os.chmod(path, st_mode | stat.S_IWRITE) + except OSError: + pass + else: + # use the original function to repeat the operation + try: + func(path) + return + except OSError: + pass + + if not isinstance(exc_info, BaseException): + _, exc_info, _ = exc_info + onexc(func, path, exc_info) + + +def display_path(path: str) -> str: + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = "." + path[len(os.getcwd()) :] + return path + + +def backup_dir(dir: str, ext: str = ".bak") -> str: + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message: str, options: Iterable[str]) -> str: + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message: str) -> None: + """Raise an error if no input is allowed.""" + if os.environ.get("PIP_NO_INPUT"): + raise Exception( + f"No input was expected ($PIP_NO_INPUT set); question: {message}" + ) + + +def ask(message: str, options: Iterable[str]) -> str: + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + "Your response ({!r}) was not one of the expected responses: " + "{}".format(response, ", ".join(options)) + ) + else: + return response + + +def ask_input(message: str) -> str: + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message: str) -> str: + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def strtobool(val: str) -> int: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError(f"invalid truth value {val!r}") + + +def format_size(bytes: float) -> str: + if bytes > 1000 * 1000: + return f"{bytes / 1000.0 / 1000:.1f} MB" + elif bytes > 10 * 1000: + return f"{int(bytes / 1000)} kB" + elif bytes > 1000: + return f"{bytes / 1000.0:.1f} kB" + else: + return f"{int(bytes)} bytes" + + +def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]: + """Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes + + +def is_installable_dir(path: str) -> bool: + """Is path is a directory containing pyproject.toml or setup.py? 
+ + If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for + a legacy setuptools layout by identifying setup.py. We don't check for the + setup.cfg because using it without setup.py is only available for PEP 517 + projects, which are already covered by the pyproject.toml check. + """ + if not os.path.isdir(path): + return False + if os.path.isfile(os.path.join(path, "pyproject.toml")): + return True + if os.path.isfile(os.path.join(path, "setup.py")): + return True + return False + + +def read_chunks( + file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE +) -> Generator[bytes, None, None]: + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def normalize_path(path: str, resolve_symlinks: bool = True) -> str: + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = os.path.expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path: str) -> Tuple[str, str]: + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith(".tar"): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old: str, new: str) -> None: + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path: str) -> bool: + """ + Return True if path is within sys.prefix, if we're running in a virtualenv. + + If we're not in a virtualenv, all paths are considered "local." + + Caution: this function assumes the head of path has been normalized + with normalize_path. + """ + if not running_under_virtualenv(): + return True + return path.startswith(normalize_path(sys.prefix)) + + +def write_output(msg: Any, *args: Any) -> None: + logger.info(msg, *args) + + +class StreamWrapper(StringIO): + orig_stream: TextIO + + @classmethod + def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": + ret = cls() + ret.orig_stream = orig_stream + return ret + + # compileall.compile_dir() needs stdout.encoding to print to stdout + # type ignore is because TextIOBase.encoding is writeable + @property + def encoding(self) -> str: # type: ignore + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]: + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout() -> ContextManager[StreamWrapper]: + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output("stdout") + + +def captured_stderr() -> ContextManager[StreamWrapper]: + """ + See captured_stdout(). 
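The ".tar"-aware splitext defined above differs from os.path.splitext on double extensions; a quick check (editor's sketch, assuming the vendored pip is importable):

    from pip._internal.utils.misc import splitext

    print(splitext("pkg-1.0.tar.gz"))  # ('pkg-1.0', '.tar.gz')
    print(splitext("pkg-1.0.zip"))     # ('pkg-1.0', '.zip')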
+ """ + return captured_output("stderr") + + +# Simulates an enum +def enum(*sequential: Any, **named: Any) -> Type[Any]: + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums["reverse_mapping"] = reverse + return type("Enum", (), enums) + + +def build_netloc(host: str, port: Optional[int]) -> str: + """ + Build a netloc from a host-port pair + """ + if port is None: + return host + if ":" in host: + # Only wrap host with square brackets when it is IPv6 + host = f"[{host}]" + return f"{host}:{port}" + + +def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: + """ + Build a full URL from a netloc. + """ + if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = f"[{netloc}]" + return f"{scheme}://{netloc}" + + +def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]: + """ + Return the host-port pair from a netloc. + """ + url = build_url_from_netloc(netloc) + parsed = urllib.parse.urlparse(url) + return parsed.hostname, parsed.port + + +def split_auth_from_netloc(netloc: str) -> NetlocTuple: + """ + Parse out and remove the auth information from a netloc. + + Returns: (netloc, (username, password)). + """ + if "@" not in netloc: + return netloc, (None, None) + + # Split from the right because that's how urllib.parse.urlsplit() + # behaves if more than one @ is present (which can be checked using + # the password attribute of urlsplit()'s return value). + auth, netloc = netloc.rsplit("@", 1) + pw: Optional[str] = None + if ":" in auth: + # Split from the left because that's how urllib.parse.urlsplit() + # behaves if more than one : is present (which again can be checked + # using the password attribute of the return value) + user, pw = auth.split(":", 1) + else: + user, pw = auth, None + + user = urllib.parse.unquote(user) + if pw is not None: + pw = urllib.parse.unquote(pw) + + return netloc, (user, pw) + + +def redact_netloc(netloc: str) -> str: + """ + Replace the sensitive data in a netloc with "****", if it exists. + + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" + """ + netloc, (user, password) = split_auth_from_netloc(netloc) + if user is None: + return netloc + if password is None: + user = "****" + password = "" + else: + user = urllib.parse.quote(user) + password = ":****" + return f"{user}{password}@{netloc}" + + +def _transform_url( + url: str, transform_netloc: Callable[[str], Tuple[Any, ...]] +) -> Tuple[str, NetlocTuple]: + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. 
+ """ + purl = urllib.parse.urlsplit(url) + netloc_tuple = transform_netloc(purl.netloc) + # stripped url + url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment) + surl = urllib.parse.urlunsplit(url_pieces) + return surl, cast("NetlocTuple", netloc_tuple) + + +def _get_netloc(netloc: str) -> NetlocTuple: + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc: str) -> Tuple[str]: + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url( + url: str, +) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]: + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth + + +def remove_auth_from_url(url: str) -> str: + """Return a copy of url with 'username:password@' removed.""" + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + return _transform_url(url, _get_netloc)[0] + + +def redact_auth_from_url(url: str) -> str: + """Replace the password in a given url with ****.""" + return _transform_url(url, _redact_netloc)[0] + + +def redact_auth_from_requirement(req: Requirement) -> str: + """Replace the password in a given requirement url with ****.""" + if not req.url: + return str(req) + return str(req).replace(req.url, redact_auth_from_url(req.url)) + + +class HiddenText: + def __init__(self, secret: str, redacted: str) -> None: + self.secret = secret + self.redacted = redacted + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return self.redacted + + # This is useful for testing. + def __eq__(self, other: Any) -> bool: + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string. + return self.secret == other.secret + + +def hide_value(value: str) -> HiddenText: + return HiddenText(value, redacted="****") + + +def hide_url(url: str) -> HiddenText: + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) + + +def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip", + f"pip{sys.version_info.major}", + f"pip{sys.version_info.major}.{sys.version_info.minor}", + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] + raise CommandError( + "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) + ) + ) + + +def check_externally_managed() -> None: + """Check whether the current environment is externally managed. + + If the ``EXTERNALLY-MANAGED`` config file is found, the current environment + is considered externally managed, and an ExternallyManagedEnvironment is + raised. 
+ """ + if running_under_virtualenv(): + return + marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED") + if not os.path.isfile(marker): + return + raise ExternallyManagedEnvironment.from_config(marker) + + +def is_console_interactive() -> bool: + """Is this console interactive?""" + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]: + """Return (hash, length) for path using hashlib.sha256()""" + + h = hashlib.sha256() + length = 0 + with open(path, "rb") as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]: + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... + """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred: Callable[[T], bool], + iterable: Iterable[T], +) -> Tuple[Iterable[T], Iterable[T]]: + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) + + +class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller): + def __init__( + self, + config_holder: Any, + source_dir: str, + build_backend: str, + backend_path: Optional[str] = None, + runner: Optional[Callable[..., None]] = None, + python_executable: Optional[str] = None, + ): + super().__init__( + source_dir, build_backend, backend_path, runner, python_executable + ) + self.config_holder = config_holder + + def build_wheel( + self, + wheel_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_wheel( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def build_sdist( + self, + sdist_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_sdist(sdist_directory, config_settings=cs) + + def build_editable( + self, + wheel_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_editable( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def get_requires_for_build_wheel( + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_wheel(config_settings=cs) + + def get_requires_for_build_sdist( + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_sdist(config_settings=cs) + + def get_requires_for_build_editable( + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_editable(config_settings=cs) + + def prepare_metadata_for_build_wheel( + self, + metadata_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + _allow_fallback: bool = True, + ) -> str: + cs = self.config_holder.config_settings + return 
super().prepare_metadata_for_build_wheel( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) + + def prepare_metadata_for_build_editable( + self, + metadata_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, + _allow_fallback: bool = True, + ) -> str: + cs = self.config_holder.config_settings + return super().prepare_metadata_for_build_editable( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/models.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/models.py new file mode 100644 index 0000000..b6bb21a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/models.py @@ -0,0 +1,39 @@ +"""Utilities for defining models +""" + +import operator +from typing import Any, Callable, Type + + +class KeyBasedCompareMixin: + """Provides comparison capabilities that is based on a key""" + + __slots__ = ["_compare_key", "_defining_class"] + + def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None: + self._compare_key = key + self._defining_class = defining_class + + def __hash__(self) -> int: + return hash(self._compare_key) + + def __lt__(self, other: Any) -> bool: + return self._compare(other, operator.__lt__) + + def __le__(self, other: Any) -> bool: + return self._compare(other, operator.__le__) + + def __gt__(self, other: Any) -> bool: + return self._compare(other, operator.__gt__) + + def __ge__(self, other: Any) -> bool: + return self._compare(other, operator.__ge__) + + def __eq__(self, other: Any) -> bool: + return self._compare(other, operator.__eq__) + + def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool: + if not isinstance(other, self._defining_class): + return NotImplemented + + return method(self._compare_key, other._compare_key) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..b9f6af4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py @@ -0,0 +1,57 @@ +import functools +import logging +import re +from typing import NewType, Optional, Tuple, cast + +from pip._vendor.packaging import specifiers, version +from pip._vendor.packaging.requirements import Requirement + +NormalizedExtra = NewType("NormalizedExtra", str) + +logger = logging.getLogger(__name__) + + +def check_requires_python( + requires_python: Optional[str], version_info: Tuple[int, ...] +) -> bool: + """ + Check if the given Python version matches a "Requires-Python" specifier. + + :param version_info: A 3-tuple of ints representing a Python + major-minor-micro version to check (e.g. `sys.version_info[:3]`). + + :return: `True` if the given Python version satisfies the requirement. + Otherwise, return `False`. + + :raises InvalidSpecifier: If `requires_python` has an invalid format. 
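KeyBasedCompareMixin above is used by subclassing it and supplying a comparison key; a minimal, hypothetical subclass (editor's sketch; pip's own Link and InstallationCandidate follow the same pattern):

    from pip._internal.utils.models import KeyBasedCompareMixin

    class Candidate(KeyBasedCompareMixin):
        def __init__(self, version: int) -> None:
            super().__init__(key=version, defining_class=Candidate)

    assert Candidate(1) < Candidate(2)
    assert Candidate(2) == Candidate(2)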
+ """ + if requires_python is None: + # The package provides no information + return True + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + python_version = version.parse(".".join(map(str, version_info))) + return python_version in requires_python_specifier + + +@functools.lru_cache(maxsize=512) +def get_requirement(req_string: str) -> Requirement: + """Construct a packaging.Requirement object with caching""" + # Parsing requirement strings is expensive, and is also expected to happen + # with a low diversity of different arguments (at least relative the number + # constructed). This method adds a cache to requirement object creation to + # minimize repeated parsing of the same string to construct equivalent + # Requirement objects. + return Requirement(req_string) + + +def safe_extra(extra: str) -> NormalizedExtra: + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + + This function is duplicated from ``pkg_resources``. Note that this is not + the same to either ``canonicalize_name`` or ``_egg_link_name``. + """ + return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000..96d1b24 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/setuptools_build.py @@ -0,0 +1,146 @@ +import sys +import textwrap +from typing import List, Optional, Sequence + +# Shim to wrap setup.py invocation with setuptools +# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on +# Windows are correctly handled (it should be "C:\\Users" not "C:\Users"). +_SETUPTOOLS_SHIM = textwrap.dedent( + """ + exec(compile(''' + # This is -- a caller that pip uses to run setup.py + # + # - It imports setuptools before invoking setup.py, to enable projects that directly + # import from `distutils.core` to work with newer packaging standards. + # - It provides a clear error message when setuptools is not installed. + # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so + # setuptools doesn't think the script is `-c`. This avoids the following warning: + # manifest_maker: standard file '-c' not found". + # - It generates a shim setup.py, for handling setup.cfg-only projects. + import os, sys, tokenize + + try: + import setuptools + except ImportError as error: + print( + "ERROR: Can not execute `setup.py` since setuptools is not available in " + "the build environment.", + file=sys.stderr, + ) + sys.exit(1) + + __file__ = %r + sys.argv[0] = __file__ + + if os.path.exists(__file__): + filename = __file__ + with tokenize.open(__file__) as f: + setup_py_code = f.read() + else: + filename = "" + setup_py_code = "from setuptools import setup; setup()" + + exec(compile(setup_py_code, filename, "exec")) + ''' % ({!r},), "", "exec")) + """ +).rstrip() + + +def make_setuptools_shim_args( + setup_py_path: str, + global_options: Optional[Sequence[str]] = None, + no_user_config: bool = False, + unbuffered_output: bool = False, +) -> List[str]: + """ + Get setuptools command arguments with shim wrapped setup file invocation. + + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. 
+ :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path: str, + global_options: Sequence[str], + build_options: Sequence[str], + destination_dir: str, +) -> List[str]: + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path: str, + global_options: Sequence[str], +) -> List[str]: + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path: str, + *, + global_options: Sequence[str], + no_user_config: bool, + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, +) -> List[str]: + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--install-dir", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path: str, + egg_info_dir: Optional[str], + no_user_config: bool, +) -> List[str]: + args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 0000000..79580b0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,260 @@ +import logging +import os +import shlex +import subprocess +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + List, + Mapping, + Optional, + Union, +) + +from pip._vendor.rich.markup import escape + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationSubprocessError +from pip._internal.utils.logging import VERBOSE, subprocess_logger +from pip._internal.utils.misc import HiddenText + +if TYPE_CHECKING: + # Literal was introduced in Python 3.8. + # + # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. + from typing import Literal + +CommandArgs = List[Union[str, HiddenText]] + + +def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs: + """ + Create a CommandArgs object. + """ + command_args: CommandArgs = [] + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. 
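For reference, the argument list produced by make_setuptools_shim_args has this shape (editor's sketch; "setup.py" is a placeholder path):

    import sys

    from pip._internal.utils.setuptools_build import make_setuptools_shim_args

    args = make_setuptools_shim_args("setup.py", no_user_config=True)
    assert args[0] == sys.executable and args[1] == "-c"
    assert args[-1] == "--no-user-cfg"
    assert "'setup.py'" in args[2]  # the shim embeds repr() of the path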
+ command_args.append(arg) + + return command_args + + +def format_command_args(args: Union[List[str], CommandArgs]) -> str: + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return " ".join( + shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) + for arg in args + ) + + +def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]: + """ + Return the arguments in their raw, unredacted form. + """ + return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] + + +def call_subprocess( + cmd: Union[List[str], CommandArgs], + show_stdout: bool = False, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + unset_environ: Optional[Iterable[str]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: Optional[bool] = True, + stdout_only: Optional[bool] = False, + *, + command_desc: str, +) -> str: + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + stdout_only: if true, return only stdout, else return both. When true, + logging of both stdout and stderr occurs when the subprocess has + terminated, else logging occurs as subprocess output is produced. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess: Callable[..., None] = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using VERBOSE. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.verbose + used_level = VERBOSE + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. 
+ use_spinner = not showing_subprocess and spinner is not None + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + cwd=cwd, + env=env, + errors="backslashreplace", + ) + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", + exc, + command_desc, + ) + raise + all_output = [] + if not stdout_only: + assert proc.stdout + assert proc.stdin + proc.stdin.close() + # In this mode, stdout and stderr are in the same pipe. + while True: + line: str = proc.stdout.readline() + if not line: + break + line = line.rstrip() + all_output.append(line + "\n") + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + output = "".join(all_output) + else: + # In this mode, stdout and stderr are in different pipes. + # We must use communicate() which is the only safe way to read both. + out, err = proc.communicate() + # log line by line to preserve pip log indenting + for out_line in out.splitlines(): + log_subprocess(out_line) + all_output.append(out) + for err_line in err.splitlines(): + log_subprocess(err_line) + all_output.append(err) + output = out + + proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == "raise": + error = InstallationSubprocessError( + command_description=command_desc, + exit_code=proc.returncode, + output_lines=all_output if not showing_subprocess else None, + ) + if log_failed_cmd: + subprocess_logger.error("%s", error, extra={"rich": True}) + subprocess_logger.verbose( + "[bold magenta]full command[/]: [blue]%s[/]", + escape(format_command_args(cmd)), + extra={"markup": True}, + ) + subprocess_logger.verbose( + "[bold magenta]cwd[/]: %s", + escape(cwd or "[inherit]"), + extra={"markup": True}, + ) + + raise error + elif on_returncode == "warn": + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == "ignore": + pass + else: + raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") + return output + + +def runner_with_spinner_message(message: str) -> Callable[..., None]: + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for BuildBackendHookCaller. Thus, the runner has + an API that matches what's expected by BuildBackendHookCaller.subprocess_runner. 
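call_subprocess above requires an explicit, keyword-only command description and returns the captured output; a minimal invocation (editor's sketch, assuming the vendored pip's logging module has been imported as part of pip):

    import sys

    from pip._internal.utils.subprocess import call_subprocess

    out = call_subprocess(
        [sys.executable, "-c", "print('hello')"],
        command_desc="python -c print",
    )
    assert out == "hello\n"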
+ """ + + def runner( + cmd: List[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + ) -> None: + with open_spinner(message) as spinner: + call_subprocess( + cmd, + command_desc=message, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..4eec5f3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,296 @@ +import errno +import itertools +import logging +import os.path +import tempfile +import traceback +from contextlib import ExitStack, contextmanager +from pathlib import Path +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + Optional, + TypeVar, + Union, +) + +from pip._internal.utils.misc import enum, rmtree + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="TempDirectory") + + +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. +tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager: Optional[ExitStack] = None + + +@contextmanager +def global_tempdir_manager() -> Generator[None, None, None]: + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry: + """Manages temp directory behavior""" + + def __init__(self) -> None: + self._should_delete: Dict[str, bool] = {} + + def set_delete(self, kind: str, value: bool) -> None: + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind: str) -> bool: + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None + + +@contextmanager +def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]: + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default: + pass + + +_default = _Default() + + +class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path: Optional[str] = None, + delete: Union[bool, None, _Default] = _default, + kind: str = "temp", + globally_managed: bool = False, + ignore_cleanup_errors: bool = True, + ): + super().__init__() + + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. 
+ delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. + delete = None + + # The only time we specify path is in for editables where it + # is the value of the --src option. + if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + self.ignore_cleanup_errors = ignore_cleanup_errors + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self) -> str: + assert not self._deleted, f"Attempted to access deleted path: {self._path}" + return self._path + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self: _T) -> _T: + return self + + def __exit__(self, exc: Any, value: Any, tb: Any) -> None: + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind: str) -> str: + """Create a temporary directory and store its path in self.path""" + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + logger.debug("Created temporary directory: %s", path) + return path + + def cleanup(self) -> None: + """Remove the temporary directory created and reset state""" + self._deleted = True + if not os.path.exists(self._path): + return + + errors: List[BaseException] = [] + + def onerror( + func: Callable[..., Any], + path: Path, + exc_val: BaseException, + ) -> None: + """Log a warning for a `rmtree` error and continue""" + formatted_exc = "\n".join( + traceback.format_exception_only(type(exc_val), exc_val) + ) + formatted_exc = formatted_exc.rstrip() # remove trailing new line + if func in (os.unlink, os.remove, os.rmdir): + logger.debug( + "Failed to remove a temporary file '%s' due to %s.\n", + path, + formatted_exc, + ) + else: + logger.debug("%s failed with %s.", func.__qualname__, formatted_exc) + errors.append(exc_val) + + if self.ignore_cleanup_errors: + try: + # first try with tenacity; retrying to handle ephemeral errors + rmtree(self._path, ignore_errors=False) + except OSError: + # last pass ignore/log all errors + rmtree(self._path, onexc=onerror) + if errors: + logger.warning( + "Failed to remove contents in a temporary directory '%s'.\n" + "You can safely remove it manually.", + self._path, + ) + else: + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. 
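TempDirectory above is normally used as a context manager; with no explicit delete flag and no active registry, the directory is removed on exit (editor's sketch):

    from pip._internal.utils.temp_dir import TempDirectory

    with TempDirectory(kind="demo") as tmp:
        print(tmp.path)  # e.g. /tmp/pip-demo-a1b2c3
    # the directory is gone; accessing tmp.path now raises AssertionError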
+ # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original: str, delete: Optional[bool] = None) -> None: + self.original = original.rstrip("/\\") + super().__init__(delete=delete) + + @classmethod + def _generate_names(cls, name: str) -> Generator[str, None, None]: + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). + """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1 + ): + new_name = "~" + "".join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i + ): + new_name = "~" + "".join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind: str) -> str: + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 0000000..78b5c13 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,257 @@ +"""Utilities related archives. 
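The name-generation scheme described above can be previewed directly (editor's sketch):

    from pip._internal.utils.temp_dir import AdjacentTempDirectory

    names = AdjacentTempDirectory._generate_names("foobar")
    print(next(names))  # '~oobar' -- same length, but never a valid package name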
+""" + +import logging +import os +import shutil +import stat +import tarfile +import zipfile +from typing import Iterable, List, Optional +from zipfile import ZipInfo + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug("bz2 module is not available") + +try: + # Only for Python 3.3+ + import lzma # noqa + + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug("lzma module is not available") + + +def current_umask() -> int: + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path: str) -> List[str]: + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path + ): + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) + else: + return [path, ""] + + +def has_leading_dir(paths: Iterable[str]) -> bool: + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory: str, target: str) -> bool: + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def set_extracted_file_to_default_mode_plus_executable(path: str) -> None: + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows per python docs + """ + os.chmod(path, (0o777 & ~current_umask() | 0o111)) + + +def zip_item_is_executable(info: ZipInfo) -> bool: + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename: str, location: str, flatten: bool = True) -> None: + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + zipfp = open(filename, "rb") + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + "The zip file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith("/") or fn.endswith("\\"): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename: str, location: str) -> None: + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = "r:bz2" + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" + else: + logger.warning( + "Cannot determine compression type for file %s", + filename, + ) + mode = "r:*" + tar = tarfile.open(filename, mode, encoding="utf-8") + try: + leading = has_leading_dir([member.name for member in tar.getmembers()]) + for member in tar.getmembers(): + fn = member.name + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, path, location)) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + tar._extract_member(member, path) + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? 
+ if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + finally: + tar.close() + + +def unpack_file( + filename: str, + location: str, + content_type: Optional[str] = None, +) -> None: + filename = os.path.realpath(filename) + if ( + content_type == "application/zip" + or filename.lower().endswith(ZIP_EXTENSIONS) + or zipfile.is_zipfile(filename) + ): + unzip_file(filename, location, flatten=not filename.endswith(".whl")) + elif ( + content_type == "application/x-gzip" + or tarfile.is_tarfile(filename) + or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", + filename, + location, + content_type, + ) + raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/urls.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 0000000..6ba2e04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,62 @@ +import os +import string +import urllib.parse +import urllib.request +from typing import Optional + +from .compat import WINDOWS + + +def get_url_scheme(url: str) -> Optional[str]: + if ":" not in url: + return None + return url.split(":", 1)[0].lower() + + +def path_to_url(path: str) -> str: + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path)) + return url + + +def url_to_path(url: str) -> str: + """ + Convert a file: URL to a path. + """ + assert url.startswith( + "file:" + ), f"You can only turn file: urls into filenames (not {url!r})" + + _, netloc, path, _, _ = urllib.parse.urlsplit(url) + + if not netloc or netloc == "localhost": + # According to RFC 8089, same as empty authority. + netloc = "" + elif WINDOWS: + # If we have a UNC path, prepend UNC share notation. + netloc = "\\\\" + netloc + else: + raise ValueError( + f"non-local file URIs are not supported on this platform: {url!r}" + ) + + path = urllib.request.url2pathname(netloc + path) + + # On Windows, urlsplit parses the path as something like "/C:/Users/foo". + # This creates issues for path-related functions like io.open(), so we try + # to detect and strip the leading slash. + if ( + WINDOWS + and not netloc # Not UNC. + and len(path) >= 3 + and path[0] == "/" # Leading slash to strip. + and path[1] in string.ascii_letters # Drive letter. + and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path. + ): + path = path[1:] + + return path diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 0000000..882e36f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,104 @@ +import logging +import os +import re +import site +import sys +from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?P<value>true|false)" +) + + +def _running_under_venv() -> bool: + """Checks if sys.base_prefix and sys.prefix match.
+ + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_legacy_virtualenv() -> bool: + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. + """ + # pypa/virtualenv case + return hasattr(sys, "real_prefix") + + +def running_under_virtualenv() -> bool: + """True if we're running inside a virtual environment, False otherwise.""" + return _running_under_venv() or _running_under_legacy_virtualenv() + + +def _get_pyvenv_cfg_lines() -> Optional[List[str]]: + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg") + try: + # Although PEP 405 does not specify, the built-in venv module always + # writes with UTF-8. (pypa/pip#8717) + with open(pyvenv_cfg_file, encoding="utf-8") as f: + return f.read().splitlines() # avoids trailing newlines + except OSError: + return None + + +def _no_global_under_venv() -> bool: + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group("value") == "false": + return True + return False + + +def _no_global_under_legacy_virtualenv() -> bool: + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, + "no-global-site-packages.txt", + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global() -> bool: + """Returns a boolean, whether running in venv with no system site-packages.""" + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_legacy_virtualenv(): + return _no_global_under_legacy_virtualenv() + + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py b/myenv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 0000000..3551f8f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,134 @@ +"""Support functions for working with wheel files. 
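The virtual-environment checks above combine PEP 405 detection with legacy-virtualenv detection (editor's sketch):

    import sys

    from pip._internal.utils.virtualenv import (
        running_under_virtualenv,
        virtualenv_no_global,
    )

    print(sys.prefix != getattr(sys, "base_prefix", sys.prefix))  # PEP 405 venv?
    print(running_under_virtualenv(), virtualenv_no_global())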
+""" + +import logging +from email.message import Message +from email.parser import Parser +from typing import Tuple +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import UnsupportedWheel + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]: + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}") + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source: ZipFile, name: str) -> str: + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. + """ + # Zip file path separators must be / + subdirs = {p.split("/", 1)[0] for p in source.namelist()} + + info_dirs = [s for s in subdirs if s.endswith(".dist-info")] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + f".dist-info directory {info_dir!r} does not start with {canonical_name!r}" + ) + + return info_dir + + +def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes: + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") + + +def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message: + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = wheel_contents.decode() + except UnicodeDecodeError as e: + raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data: Message) -> Tuple[int, ...]: + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split("."))) + except ValueError: + raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + +def check_compatibility(version: Tuple[int, ...], name: str) -> None: + """Raises errors or warns if called with an incompatible Wheel-Version. 
+ + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, ".".join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + "Installing from a newer Wheel-Version (%s)", + ".".join(map(str, version)), + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 0000000..b6beddb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,15 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory may still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + RemoteNotValidError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..11df1c0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc new file mode 100644 index 0000000..247d89f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc new file mode 100644 index 0000000..8b06b5b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc new file mode 100644 index 0000000..9c02d1b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc new file mode 100644 index 0000000..52320d3 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc new file mode 100644 index 0000000..802a79d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000..20a17ed --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,112 @@ +import logging +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = "bzr" + dirname = ".bzr" + repo_name = "branch" + schemes = ( + "bzr+http", + "bzr+https", + "bzr+ssh", + "bzr+sftp", + "bzr+ftp", + "bzr+lp", + "bzr+file", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flag = "--quiet" + elif verbosity == 1: + flag = "" + else: + flag = f"-{'v'*verbosity}" + cmd_args = make_command( + "checkout", "--lightweight", flag, rev_options.to_args(), url, dest + ) + self.run_command(cmd_args) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(make_command("switch", url), cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + output = self.run_command( + make_command("info"), show_stdout=False, stdout_only=True, cwd=dest + ) + if output.startswith("Standalone "): + # Older versions of pip used to create standalone branches. + # Convert the standalone branch to a checkout by calling "bzr bind". 
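+ # Editor's note (illustrative, not part of pip): run_command() prefixes
+ # each argument list with the VCS name, so the two calls below amount to
+ # running, inside `dest`:
+ #   bzr bind -q <url>
+ #   bzr update -q [-r <rev>]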
+ cmd_args = make_command("bind", "-q", url) + self.run_command(cmd_args, cwd=dest) + + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// re-add it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith("ssh://"): + url = "bzr+" + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location: str) -> str: + urls = cls.run_command( + ["info"], show_stdout=False, stdout_only=True, cwd=location + ) + for line in urls.splitlines(): + line = line.strip() + for x in ("checkout of branch: ", "parent branch: "): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location: str) -> str: + revision = cls.run_command( + ["revno"], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/git.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 0000000..8c242cf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,526 @@ +import logging +import os.path +import pathlib +import re +import urllib.parse +import urllib.request +from typing import List, Optional, Tuple + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RemoteNotValidError, + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +GIT_VERSION_REGEX = re.compile( + r"^git version " # Prefix. + r"(\d+)" # Major. + r"\.(\d+)" # Dot, minor. + r"(?:\.(\d+))?" # Optional dot, patch. + r".*$" # Suffix, including any pre- and post-release segments we don't care about. +) + +HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") + +# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' +SCP_REGEX = re.compile( + r"""^ + # Optional user, e.g. 'git@' + (\w+@)? + # Server, e.g. 'github.com'. + ([^/:]+): + # The server-side path. e.g. 'user/project.git'. Must start with an + # alphanumeric character so as not to be confusable with a Windows paths + # like 'C:/foo/bar' or 'C:\foo\bar'. 
+ (\w[^:]*) + $""", + re.VERBOSE, +) + + +def looks_like_hash(sha: str) -> bool: + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = "git" + dirname = ".git" + repo_name = "clone" + schemes = ( + "git+http", + "git+https", + "git+ssh", + "git+git", + "git+file", + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ("GIT_DIR", "GIT_WORK_TREE") + default_arg_rev = "HEAD" + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [rev] + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0]) + return not is_tag_or_branch + + def get_git_version(self) -> Tuple[int, ...]: + version = self.run_command( + ["version"], + command_desc="git version", + show_stdout=False, + stdout_only=True, + ) + match = GIT_VERSION_REGEX.match(version) + if not match: + logger.warning("Can't parse git version: %s", version) + return () + return (int(match.group(1)), int(match.group(2))) + + @classmethod + def get_current_branch(cls, location: str) -> Optional[str]: + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. + args = ["symbolic-ref", "-q", "HEAD"] + output = cls.run_command( + args, + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + ref = output.strip() + + if ref.startswith("refs/heads/"): + return ref[len("refs/heads/") :] + + return None + + @classmethod + def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]: + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. + output = cls.run_command( + ["show-ref", rev], + cwd=dest, + show_stdout=False, + stdout_only=True, + on_returncode="ignore", + ) + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + ref_sha, ref_name = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. 
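+ # Editor's note (illustrative, not part of pip): `git show-ref <rev>`
+ # prints lines of the form "<40-hex sha> <ref name>", e.g.
+ #   85ca956c0b6c6b5e98a1f0065c4dd61ba1035a30 refs/tags/v1.0
+ # so any line that does not split into exactly two fields is malformed.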
+ raise ValueError(f"unexpected show-ref line: {line!r}") + + refs[ref_name] = ref_sha + + branch_ref = f"refs/remotes/origin/{rev}" + tag_ref = f"refs/tags/{rev}" + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def _should_fetch(cls, dest: str, rev: str) -> bool: + """ + Return true if rev is a ref or is a commit that we don't have locally. + + Branches and tags are not considered in this method because they are + assumed to be always available locally (which is a normal outcome of + ``git clone`` and ``git fetch --tags``). + """ + if rev.startswith("refs/"): + # Always fetch remote refs. + return True + + if not looks_like_hash(rev): + # Git fetch would fail with abbreviated commits. + return False + + if cls.has_commit(dest, rev): + # Don't fetch if we have the commit locally. + return False + + return True + + @classmethod + def resolve_revision( + cls, dest: str, url: HiddenText, rev_options: RevOptions + ) -> RevOptions: + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options.branch_name = rev if is_branch else None + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not cls._should_fetch(dest, rev): + return rev_options + + # fetch the requested revision + cls.run_command( + make_command("fetch", "-q", url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev="FETCH_HEAD") + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return cls.get_revision(dest) == name + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) + if verbosity <= 0: + flags: Tuple[str, ...] = ("--quiet",) + elif verbosity == 1: + flags = () + else: + flags = ("--verbose", "--progress") + if self.get_git_version() >= (2, 17): + # Git added support for partial clone in 2.17 + # https://git-scm.com/docs/partial-clone + # Speeds up cloning by functioning without a complete copy of repository + self.run_command( + make_command( + "clone", + "--filter=blob:none", + *flags, + url, + dest, + ) + ) + else: + self.run_command(make_command("clone", *flags, url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. 
+ rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, "branch_name", None) + logger.debug("Rev options %s, branch_name %s", rev_options, branch_name) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + "checkout", + "-q", + rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. + track_branch = f"origin/{branch_name}" + cmd_args = [ + "checkout", + "-b", + branch_name, + "--track", + track_branch, + ] + self.run_command(cmd_args, cwd=dest) + else: + sha = self.get_revision(dest) + rev_options = rev_options.make_new(sha) + + logger.info("Resolved %s to commit %s", url, rev_options.rev) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command( + make_command("config", "remote.origin.url", url), + cwd=dest, + ) + cmd_args = make_command("checkout", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + # First fetch changes from the default remote + if self.get_git_version() >= (1, 9): + # fetch tags in addition to everything else + self.run_command(["fetch", "-q", "--tags"], cwd=dest) + else: + self.run_command(["fetch", "-q"], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = cls.run_command( + ["config", "--get-regexp", r"remote\..*\.url"], + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith("remote.origin.url "): + found_remote = remote + break + url = found_remote.split(" ")[1] + return cls._git_remote_to_pip_url(url.strip()) + + @staticmethod + def _git_remote_to_pip_url(url: str) -> str: + """ + Convert a remote url from what git uses to what pip accepts. + + There are 3 legal forms **url** may take: + + 1. A fully qualified url: ssh://git@example.com/foo/bar.git + 2. A local project.git folder: /path/to/bare/repository.git + 3. SCP shorthand for form 1: git@example.com:foo/bar.git + + Form 1 is output as-is. Form 2 must be converted to URI and form 3 must + be converted to form 1. + + See the corresponding test test_git_remote_url_to_pip() for examples of + sample inputs/outputs. + """ + if re.match(r"\w+://", url): + # This is already valid. Pass it though as-is. + return url + if os.path.exists(url): + # A local bare remote (git clone --mirror). + # Needs a file:// prefix. 
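+ # Editor's note (illustrative, not part of pip): e.g.
+ # PurePath("/srv/repo.git").as_uri() yields "file:///srv/repo.git", and
+ # the SCP branch below rewrites "git@example.com:foo/bar.git" to
+ # "ssh://git@example.com/foo/bar.git".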
+ return pathlib.PurePath(url).as_uri() + scp_match = SCP_REGEX.match(url) + if scp_match: + # Add an ssh:// prefix and replace the ':' with a '/'. + return scp_match.expand(r"ssh://\1\2/\3") + # Otherwise, bail out. + raise RemoteNotValidError(url) + + @classmethod + def has_commit(cls, location: str, rev: str) -> bool: + """ + Check if rev is a commit that is available in the local repository. + """ + try: + cls.run_command( + ["rev-parse", "-q", "--verify", "sha^" + rev], + cwd=location, + log_failed_cmd=False, + ) + except InstallationError: + return False + else: + return True + + @classmethod + def get_revision(cls, location: str, rev: Optional[str] = None) -> str: + if rev is None: + rev = "HEAD" + current_rev = cls.run_command( + ["rev-parse", rev], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + # find the repo root + git_dir = cls.run_command( + ["rev-parse", "--git-dir"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, "..")) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. 
+ """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith("file"): + initial_slashes = path[: -len(path.lstrip("/"))] + newpath = initial_slashes + urllib.request.url2pathname(path).replace( + "\\", "/" + ).lstrip("/") + after_plus = scheme.find("+") + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if "://" not in url: + assert "file:" not in url + url = url.replace("git+", "git+ssh://") + url, rev, user_pass = super().get_url_rev_and_auth(url) + url = url.replace("ssh://", "") + else: + url, rev, user_pass = super().get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location: str) -> None: + if not os.path.exists(os.path.join(location, ".gitmodules")): + return + cls.run_command( + ["submodule", "update", "--init", "--recursive", "-q"], + cwd=location, + ) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["rev-parse", "--show-toplevel"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under git control " + "because git is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + @staticmethod + def should_add_vcs_url_prefix(repo_url: str) -> bool: + """In either https or ssh form, requirements must be prefixed with git+.""" + return True + + +vcs.register(Git) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 0000000..c183d41 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,163 @@ +import configparser +import logging +import os +from typing import List, Optional, Tuple + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = "hg" + dirname = ".hg" + repo_name = "clone" + schemes = ( + "hg+file", + "hg+http", + "hg+https", + "hg+ssh", + "hg+static-http", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [f"--rev={rev}"] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Cloning hg %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flags: Tuple[str, ...] 
= ("--quiet",) + elif verbosity == 1: + flags = () + elif verbosity == 2: + flags = ("--verbose",) + else: + flags = ("--verbose", "--debug") + self.run_command(make_command("clone", "--noupdate", *flags, url, dest)) + self.run_command( + make_command("update", *flags, rev_options.to_args()), + cwd=dest, + ) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + repo_config = os.path.join(dest, self.dirname, "hgrc") + config = configparser.RawConfigParser() + try: + config.read(repo_config) + config.set("paths", "default", url.secret) + with open(repo_config, "w") as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) + else: + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(["pull", "-q"], cwd=dest) + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + url = cls.run_command( + ["showconfig", "paths.default"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the repository-local changeset revision number, as an integer. + """ + current_revision = cls.run_command( + ["parents", "--template={rev}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location: str) -> str: + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ["parents", "--template={node}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ["root"], show_stdout=False, stdout_only=True, cwd=location + ).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["root"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under hg control " + "because hg is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + +vcs.register(Mercurial) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000..16d93a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,324 @@ +import logging +import os +import re +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import ( + HiddenText, + display_path, + is_console_interactive, + is_installable_dir, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import CommandArgs, make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r"(.*)") + + +class Subversion(VersionControl): + name = "svn" + dirname = ".svn" + repo_name = "checkout" + schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file") + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + return True + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, _ in os.walk(location): + if cls.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, "entries") + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = cls._get_svn_url_rev(base) + + if base == location: + assert dirurl is not None + base = dirurl + "/" # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return str(revision) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + This override allows the auth information to be passed to svn via the + --username and --password options instead of via the URL. + """ + if scheme == "ssh": + # The --username and --password options can't be used for + # svn+ssh URLs, so keep the auth information in the URL. 
+ return super().get_netloc_and_auth(netloc, scheme)
+
+ return split_auth_from_netloc(netloc)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+ # hotfix the URL scheme after removing svn+ from svn+ssh:// re-add it
+ url, rev, user_pass = super().get_url_rev_and_auth(url)
+ if url.startswith("ssh://"):
+ url = "svn+" + url
+ return url, rev, user_pass
+
+ @staticmethod
+ def make_rev_args(
+ username: Optional[str], password: Optional[HiddenText]
+ ) -> CommandArgs:
+ extra_args: CommandArgs = []
+ if username:
+ extra_args += ["--username", username]
+ if password:
+ extra_args += ["--password", password]
+
+ return extra_args
+
+ @classmethod
+ def get_remote_url(cls, location: str) -> str:
+ # In cases where the source is in a subdirectory, we have to look up in
+ # the location until we find a valid project root.
+ orig_location = location
+ while not is_installable_dir(location):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding a Python project.
+ logger.warning(
+ "Could not find Python project for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ raise RemoteNotFoundError
+
+ url, _rev = cls._get_svn_url_rev(location)
+ if url is None:
+ raise RemoteNotFoundError
+
+ return url
+
+ @classmethod
+ def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
+ from pip._internal.exceptions import InstallationError
+
+ entries_path = os.path.join(location, cls.dirname, "entries")
+ if os.path.exists(entries_path):
+ with open(entries_path) as f:
+ data = f.read()
+ else: # subversion >= 1.7 does not have the 'entries' file
+ data = ""
+
+ url = None
+ if data.startswith("8") or data.startswith("9") or data.startswith("10"):
+ entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+ del entries[0][0] # get rid of the '8'
+ url = entries[0][3]
+ revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+ elif data.startswith("<?xml"):
+ match = _svn_xml_url_re.search(data)
+ if not match:
+ raise ValueError(f"Badly formatted data: {data!r}")
+ url = match.group(1) # get repository URL
+ revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+ else:
+ try:
+ # subversion >= 1.7
+ # Note that using get_remote_call_options is not necessary here
+ # because `svn info` is being run against a local directory.
+ # We don't need to worry about making sure interactive mode
+ # is being used to prompt for passwords, because passwords
+ # are only potentially needed for remote server requests.
+ xml = cls.run_command(
+ ["info", "--xml", location],
+ show_stdout=False,
+ stdout_only=True,
+ )
+ match = _svn_info_xml_url_re.search(xml)
+ assert match is not None
+ url = match.group(1)
+ revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+ except InstallationError:
+ url, revs = None, []
+
+ if revs:
+ rev = max(revs)
+ else:
+ rev = 0
+
+ return url, rev
+
+ @classmethod
+ def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+ """Always assume the versions don't match"""
+ return False
+
+ def __init__(self, use_interactive: Optional[bool] = None) -> None:
+ if use_interactive is None:
+ use_interactive = is_console_interactive()
+ self.use_interactive = use_interactive
+
+ # This member is used to cache the fetched version of the current
+ # ``svn`` client.
+ # Special value definitions:
+ # None: Not evaluated yet.
+ # Empty tuple: Could not parse version.
+ self._vcs_version: Optional[Tuple[int, ...]] = None
+
+ super().__init__()
+
+ def call_vcs_version(self) -> Tuple[int, ...]:
+ """Query the version of the currently installed Subversion client.
+ + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) + # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 + version_prefix = "svn, version " + version = self.run_command(["--version"], show_stdout=False, stdout_only=True) + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix) :].split()[0] + version_list = version.partition("-")[0].split(".") + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self) -> Tuple[int, ...]: + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self) -> CommandArgs: + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ["--non-interactive"] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). 
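+ # Editor's note (illustrative, not part of pip): the resulting decision
+ # table, assuming get_vcs_version() parsed "svn, version 1.10.3" into
+ # (1, 10, 3):
+ #   non-interactive session      -> ["--non-interactive"]
+ #   interactive, svn >= 1.8      -> ["--force-interactive"]
+ #   interactive, svn < 1.8 or () -> []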
+ if svn_version >= (1, 8): + return ["--force-interactive"] + + return [] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flag = "--quiet" + else: + flag = "" + cmd_args = make_command( + "checkout", + flag, + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command( + "switch", + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command( + "update", + self.get_remote_call_options(), + rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + + +vcs.register(Subversion) diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 0000000..46ca279 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,705 @@ +"""Handles all VCS (version control) support""" + +import logging +import os +import shutil +import sys +import urllib.parse +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Tuple, + Type, + Union, +) + +from pip._internal.cli.spinners import SpinnerInterface +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import ( + HiddenText, + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + is_installable_dir, + rmtree, +) +from pip._internal.utils.subprocess import ( + CommandArgs, + call_subprocess, + format_command_args, + make_command, +) +from pip._internal.utils.urls import get_url_scheme + +if TYPE_CHECKING: + # Literal was introduced in Python 3.8. + # + # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. + from typing import Literal + + +__all__ = ["vcs"] + + +logger = logging.getLogger(__name__) + +AuthInfo = Tuple[Optional[str], Optional[str]] + + +def is_url(name: str) -> bool: + """ + Return true if the name looks like a URL. + """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes + + +def make_vcs_requirement_url( + repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None +) -> str: + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = project_name.replace("-", "_") + req = f"{repo_url}@{rev}#egg={egg_project_name}" + if subdir: + req += f"&subdirectory={subdir}" + + return req + + +def find_path_to_project_root_from_repo_root( + location: str, repo_root: str +) -> Optional[str]: + """ + Find the the Python project's root by searching up the filesystem from + `location`. Return the path to project root relative to `repo_root`. + Return None if the project root is `repo_root`, or cannot be found. + """ + # find project root. 
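+ # Editor's note (illustrative, not part of pip): for a hypothetical
+ # checkout rooted at /src/repo whose setup.py lives in /src/repo/py/pkg,
+ # walking up from location="/src/repo/py/pkg" stops at the first
+ # installable directory and os.path.relpath() yields "py/pkg".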
+ orig_location = location
+ while not is_installable_dir(location):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding a Python project.
+ logger.warning(
+ "Could not find a Python project for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+
+ if os.path.samefile(repo_root, location):
+ return None
+
+ return os.path.relpath(location, repo_root)
+
+
+class RemoteNotFoundError(Exception):
+ pass
+
+
+class RemoteNotValidError(Exception):
+ def __init__(self, url: str):
+ super().__init__(url)
+ self.url = url
+
+
+class RevOptions:
+
+ """
+ Encapsulates a VCS-specific revision to install, along with any VCS
+ install options.
+
+ Instances of this class should be treated as if immutable.
+ """
+
+ def __init__(
+ self,
+ vc_class: Type["VersionControl"],
+ rev: Optional[str] = None,
+ extra_args: Optional[CommandArgs] = None,
+ ) -> None:
+ """
+ Args:
+ vc_class: a VersionControl subclass.
+ rev: the name of the revision to install.
+ extra_args: a list of extra options.
+ """
+ if extra_args is None:
+ extra_args = []
+
+ self.extra_args = extra_args
+ self.rev = rev
+ self.vc_class = vc_class
+ self.branch_name: Optional[str] = None
+
+ def __repr__(self) -> str:
+ return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"
+
+ @property
+ def arg_rev(self) -> Optional[str]:
+ if self.rev is None:
+ return self.vc_class.default_arg_rev
+
+ return self.rev
+
+ def to_args(self) -> CommandArgs:
+ """
+ Return the VCS-specific command arguments.
+ """
+ args: CommandArgs = []
+ rev = self.arg_rev
+ if rev is not None:
+ args += self.vc_class.get_base_rev_args(rev)
+ args += self.extra_args
+
+ return args
+
+ def to_display(self) -> str:
+ if not self.rev:
+ return ""
+
+ return f" (to revision {self.rev})"
+
+ def make_new(self, rev: str) -> "RevOptions":
+ """
+ Make a copy of the current instance, but with a new rev.
+
+ Args:
+ rev: the name of the revision for the new object.
+ """
+ return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
+class VcsSupport:
+ _registry: Dict[str, "VersionControl"] = {}
+ schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
+
+ def __init__(self) -> None:
+ # Register more schemes with urlparse for various version control
+ # systems
+ urllib.parse.uses_netloc.extend(self.schemes)
+ super().__init__()
+
+ def __iter__(self) -> Iterator[str]:
+ return self._registry.__iter__()
+
+ @property
+ def backends(self) -> List["VersionControl"]:
+ return list(self._registry.values())
+
+ @property
+ def dirnames(self) -> List[str]:
+ return [backend.dirname for backend in self.backends]
+
+ @property
+ def all_schemes(self) -> List[str]:
+ schemes: List[str] = []
+ for backend in self.backends:
+ schemes.extend(backend.schemes)
+ return schemes
+
+ def register(self, cls: Type["VersionControl"]) -> None:
+ if not hasattr(cls, "name"):
+ logger.warning("Cannot register VCS %s", cls.__name__)
+ return
+ if cls.name not in self._registry:
+ self._registry[cls.name] = cls()
+ logger.debug("Registered VCS backend: %s", cls.name)
+
+ def unregister(self, name: str) -> None:
+ if name in self._registry:
+ del self._registry[name]
+
+ def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
+ """
+ Return a VersionControl object if a repository of that type is found
+ at the given directory.
+ """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory. Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl: + name = "" + dirname = "" + repo_name = "" + # List of supported schemes for this Version Control + schemes: Tuple[str, ...] = () + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ: Tuple[str, ...] = () + default_arg_rev: Optional[str] = None + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith(f"{cls.name}:") + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir: str) -> str: + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir: str, project_name: str) -> str: + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = f"{cls.name}+{repo_url}" + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. 
+ """ + return False + + @classmethod + def make_rev_options( + cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None + ) -> RevOptions: + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. + """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo: str) -> bool: + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + if "+" not in scheme: + raise ValueError( + f"Sorry, {url!r} is a malformed VCS url. " + "The format is +://, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp" + ) + # Remove the vcs prefix. + scheme = scheme.split("+", 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if "@" in path: + path, rev = path.rsplit("@", 1) + if not rev: + raise InstallationError( + f"The URL {url!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL." + ) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + return url, rev, user_pass + + @staticmethod + def make_rev_args( + username: Optional[str], password: Optional[HiddenText] + ) -> CommandArgs: + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]: + """ + Return the URL and RevOptions object to use in obtain(), + as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password: Optional[HiddenText] = None + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url: str) -> str: + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib.parse.unquote(url).rstrip("/") + + @classmethod + def compare_urls(cls, url1: str, url2: str) -> bool: + """ + Compare two repo URLs for identity, ignoring incidental differences. 
+ """ + return cls.normalize_url(url1) == cls.normalize_url(url2) + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. + verbosity: verbosity level. + """ + raise NotImplementedError + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None: + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. + """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + "%s in %s exists, and has correct URL (%s)", + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + "Updating %s %s%s", + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info("Skipping because already up-to-date.") + return + + logger.warning( + "%s %s in %s exists with URL %s", + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) + else: + logger.warning( + "Directory %s already exists, and is not a %s %s.", + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore + + logger.warning( + "The plan is to install the %s repository %s", + self.name, + url, + ) + response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1]) + + if response == "a": + sys.exit(-1) + + if response == "w": + logger.warning("Deleting %s", display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + if response == "b": + dest_dir = backup_dir(dest) + logger.warning("Backing up %s to %s", display_path(dest), dest_dir) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + # Do nothing if the response is "i". 
+ if response == "s": + logger.info( + "Switching %s %s to %s%s", + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location: str, url: HiddenText, verbosity: int) -> None: + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url, verbosity=verbosity) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd: Union[List[str], CommandArgs], + show_stdout: bool = True, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + command_desc: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: bool = True, + stdout_only: bool = False, + ) -> str: + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + if command_desc is None: + command_desc = format_command_args(cmd) + try: + return call_subprocess( + cmd, + show_stdout, + cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd, + stdout_only=stdout_only, + ) + except FileNotFoundError: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + raise BadCommand( + f"Cannot find command {cls.name!r} - do you have " + f"{cls.name!r} installed and in your PATH?" + ) + except PermissionError: + # errno.EACCES = Permission denied + # This error occurs, for instance, when the command is installed + # only for another user. So, the current user don't have + # permission to call the other user command. + raise BadCommand( + f"No permission to execute {cls.name!r} - install it " + f"locally, globally (ask admin), or check your PATH. " + f"See possible solutions at " + f"https://pip.pypa.io/en/latest/reference/pip_freeze/" + f"#fixing-permission-denied." + ) + + @classmethod + def is_repository_directory(cls, path: str) -> bool: + """ + Return whether a directory path is a repository directory. + """ + logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. 
+ """ + if cls.is_repository_directory(location): + return location + return None diff --git a/myenv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py b/myenv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 0000000..b1debe3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,354 @@ +"""Orchestrator for building wheels from InstallRequirements. +""" + +import logging +import os.path +import re +import shutil +from typing import Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip._internal.metadata import FilesystemWheel, get_wheel_distribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_editable import build_wheel_editable +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) + +BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + + +def _contains_egg_info(s: str) -> bool: + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req: InstallRequirement, + need_wheel: bool, +) -> bool: + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + "Skipping %s, due to already being wheel.", + req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if not req.source_dir: + return False + + if req.editable: + # we only build PEP 660 editable requirements + return req.supports_pyproject_editable() + + return True + + +def should_build_for_wheel_command( + req: InstallRequirement, +) -> bool: + return _should_build(req, need_wheel=True) + + +def should_build_for_install_command( + req: InstallRequirement, +) -> bool: + return _should_build(req, need_wheel=False) + + +def _should_cache( + req: InstallRequirement, +) -> Optional[bool]: + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. 
+ assert not req.editable
+ assert req.source_dir
+ vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
+ assert vcs_backend
+ if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
+ return True
+ return False
+
+ assert req.link
+ base, ext = req.link.splitext()
+ if _contains_egg_info(base):
+ return True
+
+ # Otherwise, do not cache.
+ return False
+
+
+def _get_cache_dir(
+ req: InstallRequirement,
+ wheel_cache: WheelCache,
+) -> str:
+ """Return the persistent or temporary cache directory where the built
+ wheel needs to be stored.
+ """
+ cache_available = bool(wheel_cache.cache_dir)
+ assert req.link
+ if cache_available and _should_cache(req):
+ cache_dir = wheel_cache.get_path_for_link(req.link)
+ else:
+ cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
+ return cache_dir
+
+
+def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
+ canonical_name = canonicalize_name(req.name or "")
+ w = Wheel(os.path.basename(wheel_path))
+ if canonicalize_name(w.name) != canonical_name:
+ raise InvalidWheelFilename(
+ f"Wheel has unexpected file name: expected {canonical_name!r}, "
+ f"got {w.name!r}",
+ )
+ dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
+ dist_verstr = str(dist.version)
+ if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
+ raise InvalidWheelFilename(
+ f"Wheel has unexpected file name: expected {dist_verstr!r}, "
+ f"got {w.version!r}",
+ )
+ metadata_version_value = dist.metadata_version
+ if metadata_version_value is None:
+ raise UnsupportedWheel("Missing Metadata-Version")
+ try:
+ metadata_version = Version(metadata_version_value)
+ except InvalidVersion:
+ msg = f"Invalid Metadata-Version: {metadata_version_value}"
+ raise UnsupportedWheel(msg)
+ if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
+ raise UnsupportedWheel(
+ f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
+ )
+
+
+def _build_one(
+ req: InstallRequirement,
+ output_dir: str,
+ verify: bool,
+ build_options: List[str],
+ global_options: List[str],
+ editable: bool,
+) -> Optional[str]:
+ """Build one wheel.
+
+ :return: The filename of the built wheel, or None if the build failed.
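_verify_one() above reduces to two canonical-form comparisons; a compressed sketch using the standalone packaging project (pip vendors its own copy):

from packaging.utils import canonicalize_name, canonicalize_version

def filename_matches(requested_name, wheel_name, wheel_version, dist_version):
    # Both the project name and the version must agree after
    # canonicalization, e.g. "My-Pkg" == "my_pkg" and "1.0" == "1.0.0".
    return (
        canonicalize_name(wheel_name) == canonicalize_name(requested_name)
        and canonicalize_version(wheel_version)
        == canonicalize_version(dist_version)
    )

print(filename_matches("My-Pkg", "my_pkg", "1.0.0", "1.0"))  # True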
+ """ + artifact = "editable" if editable else "wheel" + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building %s for %s failed: %s", + artifact, + req.name, + e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + wheel_path = _build_one_inside_env( + req, output_dir, build_options, global_options, editable + ) + if wheel_path and verify: + try: + _verify_one(req, wheel_path) + except (InvalidWheelFilename, UnsupportedWheel) as e: + logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e) + return None + return wheel_path + + +def _build_one_inside_env( + req: InstallRequirement, + output_dir: str, + build_options: List[str], + global_options: List[str], + editable: bool, +) -> Optional[str]: + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend + if global_options: + logger.warning( + "Ignoring --global-option when building %s using PEP 517", req.name + ) + if build_options: + logger.warning( + "Ignoring --build-option when building %s using PEP 517", req.name + ) + if editable: + wheel_path = build_wheel_editable( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info( + "Created wheel for %s: filename=%s size=%d sha256=%s", + req.name, + wheel_name, + length, + wheel_hash.hexdigest(), + ) + logger.info("Stored in directory: %s", output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool: + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info("Running setup.py clean for %s", req.name) + try: + call_subprocess( + clean_args, command_desc="python setup.py clean", cwd=req.source_dir + ) + return True + except Exception: + logger.error("Failed cleaning build dir for %s", req.name) + return False + + +def build( + requirements: Iterable[InstallRequirement], + wheel_cache: WheelCache, + verify: bool, + build_options: List[str], + global_options: List[str], +) -> BuildResult: + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. 
+ logger.info(
+ "Building wheels for collected packages: %s",
+ ", ".join(req.name for req in requirements), # type: ignore
+ )
+
+ with indent_log():
+ build_successes, build_failures = [], []
+ for req in requirements:
+ assert req.name
+ cache_dir = _get_cache_dir(req, wheel_cache)
+ wheel_file = _build_one(
+ req,
+ cache_dir,
+ verify,
+ build_options,
+ global_options,
+ req.editable and req.permit_editable_wheels,
+ )
+ if wheel_file:
+ # Record the download origin in the cache
+ if req.download_info is not None:
+ # download_info is guaranteed to be set because when we build an
+ # InstallRequirement it has been through the preparer before, but
+ # let's be cautious.
+ wheel_cache.record_download_origin(cache_dir, req.download_info)
+ # Update the link for this requirement to point at the built wheel.
+ req.link = Link(path_to_url(wheel_file))
+ req.local_file_path = req.link.file_path
+ assert req.link.is_wheel
+ build_successes.append(req)
+ else:
+ build_failures.append(req)
+
+ # notify success/failure
+ if build_successes:
+ logger.info(
+ "Successfully built %s",
+ " ".join([req.name for req in build_successes]), # type: ignore
+ )
+ if build_failures:
+ logger.info(
+ "Failed to build %s",
+ " ".join([req.name for req in build_failures]), # type: ignore
+ )
+ # Return the requirements that built successfully and those that failed
+ return build_successes, build_failures diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/__init__.py new file mode 100644 index 0000000..c1884ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,121 @@ +"""
+pip._vendor is for vendoring dependencies of pip to prevent needing pip to
+depend on something external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to cause things like "six" to be available as pip.
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+ vendored_name = "{0}.{1}".format(__name__, modulename)
+
+ try:
+ __import__(modulename, globals(), locals(), level=0)
+ except ImportError:
+ # We can just silently allow import failures to pass here. If we
+ # got to this point it means that ``import pip._vendor.whatever``
+ # failed and so did ``import whatever``. Since we're importing this
+ # upfront in an attempt to alias imports, not erroring here will
+ # just mean we get a regular import error whenever pip *actually*
+ # tries to import one of these modules to use it, which actually
+ # gives us a better error message than we would have otherwise
+ # gotten.
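The else branch that follows performs the actual aliasing: the real module object is registered in sys.modules under the vendored dotted name as well. A self-contained toy version of the trick ("vendor_demo" is a hypothetical namespace, not a real package):

import importlib
import sys
import types

vendor_pkg = types.ModuleType("vendor_demo")
sys.modules["vendor_demo"] = vendor_pkg

def vendored(modulename: str) -> None:
    vendored_name = f"vendor_demo.{modulename}"
    real = importlib.import_module(modulename)  # e.g. the stdlib "json"
    # Register the real module under the vendored dotted name too.
    sys.modules[vendored_name] = real
    setattr(vendor_pkg, modulename, real)

vendored("json")
from vendor_demo import json as aliased_json
assert aliased_json is sys.modules["json"]  # same module object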
+ pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("cachecontrol") + vendored("certifi") + vendored("colorama") + vendored("distlib") + vendored("distro") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("six.moves.urllib.parse") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pep517") + vendored("pkg_resources") + vendored("platformdirs") + vendored("progress") + vendored("requests") + vendored("requests.exceptions") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("rich") + vendored("rich.console") + vendored("rich.highlighter") + vendored("rich.logging") + vendored("rich.markup") + vendored("rich.progress") + vendored("rich.segment") + vendored("rich.style") + vendored("rich.text") + vendored("rich.traceback") + vendored("tenacity") + vendored("tomli") + vendored("truststore") + vendored("urllib3") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..44404ab Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/six.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/six.cpython-312.pyc new file mode 100644 index 0000000..ada434d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/six.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc new file mode 100644 index 0000000..86a95e0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..4d20bc9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,28 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.13.1" + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.controller import CacheController +from pip._vendor.cachecontrol.wrapper import CacheControl + +__all__ = [ + "__author__", + "__email__", + "__version__", + "CacheControlAdapter", + "CacheController", + "CacheControl", +] + +import logging + +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..72cb5cc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc new file mode 100644 index 0000000..c68f254 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc new file mode 100644 index 0000000..13a3502 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..3dae399 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc new file mode 100644 index 0000000..28fde7c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc new file mode 100644 index 0000000..ce6f778 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc new file mode 100644 index 0000000..3ad221f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc new file mode 100644 index 0000000..a9e04c8 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc new file mode 100644 index 0000000..aa4c512 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..2c84208 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,70 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import logging +from argparse import ArgumentParser +from typing import TYPE_CHECKING + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +if TYPE_CHECKING: + from argparse import Namespace + + from pip._vendor.cachecontrol.controller import CacheController + + +def setup_logging() -> None: + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session() -> requests.Session: + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller # type: ignore[attr-defined] + return sess + + +def get_args() -> Namespace: + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main() -> None: + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + cache_controller: CacheController = ( + sess.cache_controller # type: ignore[attr-defined] + ) + cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..3e83e30 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,161 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import functools +import types +import zlib +from typing import TYPE_CHECKING, Any, Collection, Mapping + +from pip._vendor.requests.adapters import HTTPAdapter + +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController +from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest, Response + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from 
pip._vendor.cachecontrol.serialize import Serializer + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "PATCH", "DELETE"} + + def __init__( + self, + cache: BaseCache | None = None, + cache_etags: bool = True, + controller_class: type[CacheController] | None = None, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + cacheable_methods: Collection[str] | None = None, + *args: Any, + **kw: Any, + ) -> None: + super().__init__(*args, **kw) + self.cache = DictCache() if cache is None else cache + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, cache_etags=cache_etags, serializer=serializer + ) + + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: None | float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None, + proxies: Mapping[str, str] | None = None, + cacheable_methods: Collection[str] | None = None, + ) -> Response: + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super().send(request, stream, timeout, verify, cert, proxies) + + return resp + + def build_response( + self, + request: PreparedRequest, + response: HTTPResponse, + from_cache: bool = False, + cacheable_methods: Collection[str] | None = None, + ) -> Response: + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif int(response.status) in PERMANENT_REDIRECT_STATUSES: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. 
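In normal use the adapter is mounted on a requests Session, exactly as the get_session() helper in _cmd.py above does; a minimal sketch assuming the standalone cachecontrol and requests packages rather than pip's vendored copies:

import requests
from cachecontrol import CacheControlAdapter
from cachecontrol.cache import DictCache

sess = requests.Session()
adapter = CacheControlAdapter(DictCache(), cache_etags=True)
sess.mount("http://", adapter)   # all traffic now consults the cache first
sess.mount("https://", adapter)

resp = sess.get("https://example.com/")
print(getattr(resp, "from_cache", False))  # False on the first fetch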
+ response._fp = CallbackFileWrapper( # type: ignore[attr-defined]
+ response._fp, # type: ignore[attr-defined]
+ functools.partial(
+ self.controller.cache_response, request, response
+ ),
+ )
+ if response.chunked:
+ super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined]
+
+ def _update_chunk_length(self: HTTPResponse) -> None:
+ super_update_chunk_length()
+ if self.chunk_left == 0:
+ self._fp._close() # type: ignore[attr-defined]
+
+ response._update_chunk_length = types.MethodType( # type: ignore[attr-defined]
+ _update_chunk_length, response
+ )
+
+ resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call]
+
+ # See if we should invalidate the cache.
+ if request.method in self.invalidating_methods and resp.ok:
+ assert request.url is not None
+ cache_url = self.controller.cache_url(request.url)
+ self.cache.delete(cache_url)
+
+ # Give the request a from_cache attr to let people use it
+ resp.from_cache = from_cache # type: ignore[attr-defined]
+
+ return resp
+
+ def close(self) -> None:
+ self.cache.close()
+ super().close() # type: ignore[no-untyped-call] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..3293b00 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,74 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The cache object API for implementing caches. The default is a thread
+safe in-memory dictionary.
+"""
+from __future__ import annotations
+
+from threading import Lock
+from typing import IO, TYPE_CHECKING, MutableMapping
+
+if TYPE_CHECKING:
+ from datetime import datetime
+
+
+class BaseCache:
+ def get(self, key: str) -> bytes | None:
+ raise NotImplementedError()
+
+ def set(
+ self, key: str, value: bytes, expires: int | datetime | None = None
+ ) -> None:
+ raise NotImplementedError()
+
+ def delete(self, key: str) -> None:
+ raise NotImplementedError()
+
+ def close(self) -> None:
+ pass
+
+
+class DictCache(BaseCache):
+ def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
+ self.lock = Lock()
+ self.data = init_dict or {}
+
+ def get(self, key: str) -> bytes | None:
+ return self.data.get(key, None)
+
+ def set(
+ self, key: str, value: bytes, expires: int | datetime | None = None
+ ) -> None:
+ with self.lock:
+ self.data.update({key: value})
+
+ def delete(self, key: str) -> None:
+ with self.lock:
+ if key in self.data:
+ self.data.pop(key)
+
+
+class SeparateBodyBaseCache(BaseCache):
+ """
+ In this variant, the body is not stored mixed in with the metadata, but is
+ passed in (as a bytes-like object) in a separate call to ``set_body()``.
+
+ That is, the expected interaction pattern is::
+
+ cache.set(key, serialized_metadata)
+ cache.set_body(key, body)
+
+ Similarly, the body should be loaded separately via ``get_body()``.
+ """
+
+ def set_body(self, key: str, body: bytes) -> None:
+ raise NotImplementedError()
+
+ def get_body(self, key: str) -> IO[bytes] | None:
+ """
+ Return the body as file-like object.
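The metadata/body split described here is easiest to see with a toy implementation (purely illustrative, not part of cachecontrol):

import io

class DemoSeparateBodyCache:
    def __init__(self):
        self.meta = {}
        self.body = {}

    def set(self, key, value, expires=None):
        self.meta[key] = value   # serialized metadata only

    def set_body(self, key, body):
        self.body[key] = body    # raw bytes, stored separately

    def get_body(self, key):
        raw = self.body.get(key)
        return io.BytesIO(raw) if raw is not None else None

cache = DemoSeparateBodyCache()
cache.set("http://example.com/", b"serialized-metadata")
cache.set_body("http://example.com/", b"response-body")
print(cache.get_body("http://example.com/").read())  # b'response-body'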
+ """ + raise NotImplementedError() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..24ff469 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,8 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache +from pip._vendor.cachecontrol.caches.redis_cache import RedisCache + +__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..41d8644 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc new file mode 100644 index 0000000..2caa3b5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc new file mode 100644 index 0000000..8a614e0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..1fd2801 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,181 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import hashlib +import os +from textwrap import dedent +from typing import IO, TYPE_CHECKING + +from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.controller import CacheController + +if TYPE_CHECKING: + from datetime import datetime + + from filelock import BaseFileLock + + +def _secure_open_write(filename: str, fmode: int) -> IO[bytes]: + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. 
+ if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except OSError: + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. + fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class _FileCacheMixin: + """Shared implementation for both FileCache variants.""" + + def __init__( + self, + directory: str, + forever: bool = False, + filemode: int = 0o0600, + dirmode: int = 0o0700, + lock_class: type[BaseFileLock] | None = None, + ) -> None: + try: + if lock_class is None: + from filelock import FileLock + + lock_class = FileLock + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + filelock installed. You can install it via pip: + pip install filelock + """ + ) + raise ImportError(notice) + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x: str) -> str: + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name: str) -> str: + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> bytes | None: + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: + name = self._fn(key) + self._write(name, value) + + def _write(self, path: str, data: bytes) -> None: + """ + Safely write the data to the given path. + """ + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(path), self.dirmode) + except OSError: + pass + + with self.lock_class(path + ".lock"): + # Write our actual file + with _secure_open_write(path, self.filemode) as fh: + fh.write(data) + + def _delete(self, key: str, suffix: str) -> None: + name = self._fn(key) + suffix + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +class FileCache(_FileCacheMixin, BaseCache): + """ + Traditional FileCache: body is stored in memory, so not suitable for large + downloads. + """ + + def delete(self, key: str) -> None: + self._delete(key, "") + + +class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): + """ + Memory-efficient FileCache: body is stored in a separate file, reducing + peak memory usage. 
+ """ + + def get_body(self, key: str) -> IO[bytes] | None: + name = self._fn(key) + ".body" + try: + return open(name, "rb") + except FileNotFoundError: + return None + + def set_body(self, key: str, body: bytes) -> None: + name = self._fn(key) + ".body" + self._write(name, body) + + def delete(self, key: str) -> None: + self._delete(key, "") + self._delete(key, ".body") + + +def url_to_file_path(url: str, filecache: FileCache) -> str: + """Return the file cache path based on the URL. + + This does not ensure the file exists! + """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..f4f68c4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,48 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + + +from datetime import datetime, timezone +from typing import TYPE_CHECKING + +from pip._vendor.cachecontrol.cache import BaseCache + +if TYPE_CHECKING: + from redis import Redis + + +class RedisCache(BaseCache): + def __init__(self, conn: Redis[bytes]) -> None: + self.conn = conn + + def get(self, key: str) -> bytes | None: + return self.conn.get(key) + + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: + if not expires: + self.conn.set(key, value) + elif isinstance(expires, datetime): + now_utc = datetime.now(timezone.utc) + if expires.tzinfo is None: + now_utc = now_utc.replace(tzinfo=None) + delta = expires - now_utc + self.conn.setex(key, int(delta.total_seconds()), value) + else: + self.conn.setex(key, expires, value) + + def delete(self, key: str) -> None: + self.conn.delete(key) + + def clear(self) -> None: + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self) -> None: + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..586b9f9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,494 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The httplib2 algorithms ported for use with requests. +""" +from __future__ import annotations + +import calendar +import logging +import re +import time +from email.utils import parsedate_tz +from typing import TYPE_CHECKING, Collection, Mapping + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.serialize import Serializer + +if TYPE_CHECKING: + from typing import Literal + + from pip._vendor.requests import PreparedRequest + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +PERMANENT_REDIRECT_STATUSES = (301, 308) + + +def parse_uri(uri: str) -> tuple[str, str, str, str, str]: + """Parses a URI using the regex given in Appendix B of RFC 3986. 
+
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ """
+ match = URI.match(uri)
+ assert match is not None
+ groups = match.groups()
+ return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController:
+ """An interface to see if a request should be cached or not."""
+
+ def __init__(
+ self,
+ cache: BaseCache | None = None,
+ cache_etags: bool = True,
+ serializer: Serializer | None = None,
+ status_codes: Collection[int] | None = None,
+ ):
+ self.cache = DictCache() if cache is None else cache
+ self.cache_etags = cache_etags
+ self.serializer = serializer or Serializer()
+ self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
+
+ @classmethod
+ def _urlnorm(cls, uri: str) -> str:
+ """Normalize the URL to create a safe key for the cache"""
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ if not scheme or not authority:
+ raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+ scheme = scheme.lower()
+ authority = authority.lower()
+
+ if not path:
+ path = "/"
+
+ # Could do syntax based normalization of the URI before
+ # computing the digest. See Section 6.2.2 of Std 66.
+ request_uri = query and "?".join([path, query]) or path
+ defrag_uri = scheme + "://" + authority + request_uri
+
+ return defrag_uri
+
+ @classmethod
+ def cache_url(cls, uri: str) -> str:
+ return cls._urlnorm(uri)
+
+ def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
+ known_directives = {
+ # https://tools.ietf.org/html/rfc7234#section-5.2
+ "max-age": (int, True),
+ "max-stale": (int, False),
+ "min-fresh": (int, True),
+ "no-cache": (None, False),
+ "no-store": (None, False),
+ "no-transform": (None, False),
+ "only-if-cached": (None, False),
+ "must-revalidate": (None, False),
+ "public": (None, False),
+ "private": (None, False),
+ "proxy-revalidate": (None, False),
+ "s-maxage": (int, True),
+ }
+
+ cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+ retval: dict[str, int | None] = {}
+
+ for cc_directive in cc_headers.split(","):
+ if not cc_directive.strip():
+ continue
+
+ parts = cc_directive.split("=", 1)
+ directive = parts[0].strip()
+
+ try:
+ typ, required = known_directives[directive]
+ except KeyError:
+ logger.debug("Ignoring unknown cache-control directive: %s", directive)
+ continue
+
+ if not typ or not required:
+ retval[directive] = None
+ if typ:
+ try:
+ retval[directive] = typ(parts[1].strip())
+ except IndexError:
+ if required:
+ logger.debug(
+ "Missing value for cache-control " "directive: %s",
+ directive,
+ )
+ except ValueError:
+ logger.debug(
+ "Invalid value for cache-control directive " "%s, must be %s",
+ directive,
+ typ.__name__,
+ )
+
+ return retval
+
+ def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
+ """
+ Load a cached response, or return None if it's not available.
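The directive table above is easiest to understand by example (run against pip's vendored copy; the standalone cachecontrol package behaves the same):

from pip._vendor.cachecontrol.controller import CacheController

cc = CacheController()
print(cc.parse_cache_control(
    {"cache-control": "max-age=3600, no-store, min-fresh"}
))
# {'max-age': 3600, 'no-store': None} -- "min-fresh" is dropped because
# it requires an integer value and none was supplied.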
+ """ + cache_url = request.url + assert cache_url is not None + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return None + + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + result = self.serializer.loads(request, cache_data, body_file) + if result is None: + logger.warning("Cache entry deserialization failed, entry ignored") + return result + + def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]: + """ + Return a cached response if it exists in the cache, otherwise + return False. + """ + assert request.url is not None + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if "no-cache" in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if "max-age" in cc and cc["max-age"] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Check whether we can load the response from the cache: + resp = self._load_from_cache(request) + if not resp: + return False + + # If we have a cached permanent redirect, return it immediately. We + # don't need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. + # + # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if int(resp.status) in PERMANENT_REDIRECT_STATUSES: + msg = ( + "Returning cached permanent redirect response " + "(ignoring date and etag information)" + ) + logger.debug(msg) + return resp + + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) + if not headers or "date" not in headers: + if "etag" not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug("Purging cached response: no date or etag") + self.cache.delete(cache_url) + logger.debug("Ignoring cached response: no date") + return False + + now = time.time() + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + current_age = max(0, now - date) + logger.debug("Current age based on date: %i", current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + max_age = resp_cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) + if expires is not None: + expire_time = calendar.timegm(expires[:6]) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. 
+ max_age = cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) + + min_fresh = cc.get("min-fresh") + if min_fresh is not None: + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug("Adjusted current age from min-fresh: %i", current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug("%i > %i", freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request: PreparedRequest) -> dict[str, str]: + resp = self._load_from_cache(request) + new_headers = {} + + if resp: + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) + + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] + + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] + + return new_headers + + def _cache_set( + self, + cache_url: str, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + expires_time: int | None = None, + ) -> None: + """ + Store the data in the cache. + """ + if isinstance(self.cache, SeparateBodyBaseCache): + # We pass in the body separately; just put a placeholder empty + # string in the metadata. + self.cache.set( + cache_url, + self.serializer.dumps(request, response, b""), + expires=expires_time, + ) + # body is None can happen when, for example, we're only updating + # headers, as is the case in update_cached_response(). + if body is not None: + self.cache.set_body(cache_url, body) + else: + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body), + expires=expires_time, + ) + + def cache_response( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + status_codes: Collection[int] | None = None, + ) -> None: + """ + Algorithm for caching requests. + + This assumes a requests Response object. + """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = status_codes or self.cacheable_status_codes + if response.status not in cacheable_status_codes: + logger.debug( + "Status code %s not in %s", response.status, cacheable_status_codes + ) + return + + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) + + if "date" in response_headers: + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + else: + date = 0 + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. 
+ if (
+ body is not None
+ and "content-length" in response_headers
+ and response_headers["content-length"].isdigit()
+ and int(response_headers["content-length"]) != len(body)
+ ):
+ return
+
+ cc_req = self.parse_cache_control(request.headers)
+ cc = self.parse_cache_control(response_headers)
+
+ assert request.url is not None
+ cache_url = self.cache_url(request.url)
+ logger.debug('Updating cache with response from "%s"', cache_url)
+
+ # Delete it from the cache if we happen to have it stored there
+ no_store = False
+ if "no-store" in cc:
+ no_store = True
+ logger.debug('Response header has "no-store"')
+ if "no-store" in cc_req:
+ no_store = True
+ logger.debug('Request header has "no-store"')
+ if no_store and self.cache.get(cache_url):
+ logger.debug('Purging existing cache entry to honor "no-store"')
+ self.cache.delete(cache_url)
+ if no_store:
+ return
+
+ # https://tools.ietf.org/html/rfc7234#section-4.1:
+ # A Vary header field-value of "*" always fails to match.
+ # Storing such a response leads to a deserialization warning
+ # during cache lookup and is not allowed to ever be served,
+ # so storing it can be avoided.
+ if "*" in response_headers.get("vary", ""):
+ logger.debug('Response header has "Vary: *"')
+ return
+
+ # If we've been given an etag, then keep the response
+ if self.cache_etags and "etag" in response_headers:
+ expires_time = 0
+ if response_headers.get("expires"):
+ expires = parsedate_tz(response_headers["expires"])
+ if expires is not None:
+ expires_time = calendar.timegm(expires[:6]) - date
+
+ expires_time = max(expires_time, 14 * 86400)
+
+ logger.debug(f"etag object cached for {expires_time} seconds")
+ logger.debug("Caching due to etag")
+ self._cache_set(cache_url, request, response, body, expires_time)
+
+ # Add to the cache any permanent redirects. We do this before looking
+ # at the Date headers.
+ elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+ logger.debug("Caching permanent redirect")
+ self._cache_set(cache_url, request, response, b"")
+
+ # Add to the cache if the response headers demand it. If there
+ # is no date header then we can't do anything about expiring
+ # the cache.
+ elif "date" in response_headers:
+ time_tuple = parsedate_tz(response_headers["date"])
+ assert time_tuple is not None
+ date = calendar.timegm(time_tuple[:6])
+ # cache when there is a max-age > 0
+ max_age = cc.get("max-age")
+ if max_age is not None and max_age > 0:
+ logger.debug("Caching b/c date exists and max-age > 0")
+ expires_time = max_age
+ self._cache_set(
+ cache_url,
+ request,
+ response,
+ body,
+ expires_time,
+ )
+
+ # If the request can expire, it means we should cache it
+ # in the meantime.
+ elif "expires" in response_headers:
+ if response_headers["expires"]:
+ expires = parsedate_tz(response_headers["expires"])
+ if expires is not None:
+ expires_time = calendar.timegm(expires[:6]) - date
+ else:
+ expires_time = None
+
+ logger.debug(
+ "Caching b/c of expires header. expires in {} seconds".format(
+ expires_time
+ )
+ )
+ self._cache_set(
+ cache_url,
+ request,
+ response,
+ body,
+ expires_time,
+ )
+
+ def update_cached_response(
+ self, request: PreparedRequest, response: HTTPResponse
+ ) -> HTTPResponse:
+ """On a 304 we will get a new set of headers that we want to
+ update our cached value with, assuming we have one.
+
+ This should only ever be called when we've sent an ETag and
+ gotten a 304 as the response.
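The header merge performed below can be sketched with requests' CaseInsensitiveDict (assuming the standalone requests package; pip vendors the same class):

from requests.structures import CaseInsensitiveDict

cached = CaseInsensitiveDict({"Content-Length": "5", "X-Color": "blue"})
fresh_304 = {"X-Color": "red", "Content-Length": "0"}

excluded = {"content-length"}  # replacing it could invalidate the stored body
cached.update(
    {k: v for k, v in fresh_304.items() if k.lower() not in excluded}
)
print(dict(cached))  # {'Content-Length': '5', 'X-Color': 'red'}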
+ """ + assert request.url is not None + cache_url = self.cache_url(request.url) + cached_response = self._load_from_cache(request) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. + excluded_headers = ["content-length"] + + cached_response.headers.update( + { + k: v + for k, v in response.headers.items() # type: ignore[no-untyped-call] + if k.lower() not in excluded_headers + } + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self._cache_set(cache_url, request, cached_response) + + return cached_response diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..2514390 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,119 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import mmap +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Callable + +if TYPE_CHECKING: + from http.client import HTTPResponse + + +class CallbackFileWrapper: + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + + The data is stored in a temporary file until it is all available. As long + as the temporary files directory is disk-based (sometimes it's a + memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory + pressure is high. For small files the disk usually won't be used at all, + it'll all be in the filesystem memory cache, so there should be no + performance impact. + """ + + def __init__( + self, fp: HTTPResponse, callback: Callable[[bytes], None] | None + ) -> None: + self.__buf = NamedTemporaryFile("rb+", delete=True) + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name: str) -> Any: + # The vaguaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop thigns from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__("_CallbackFileWrapper__fp") + return getattr(fp, name) + + def __is_fp_closed(self) -> bool: + try: + return self.__fp.fp is None + + except AttributeError: + pass + + try: + closed: bool = self.__fp.closed + return closed + + except AttributeError: + pass + + # We just don't cache it then. + # TODO: Add some logging here... 
+ return False
+
+ def _close(self) -> None:
+ if self.__callback:
+ if self.__buf.tell() == 0:
+ # Empty file:
+ result = b""
+ else:
+ # Return the data without actually loading it into memory,
+ # relying on Python's buffer API and mmap(). mmap() just gives
+ # a view directly into the filesystem's memory cache, so it
+ # doesn't result in duplicate memory use.
+ self.__buf.seek(0, 0)
+ result = memoryview(
+ mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
+ )
+ self.__callback(result)
+
+ # We assign this to None here, because otherwise we can get into
+ # really tricky problems where the CPython interpreter dead locks
+ # because the callback is holding a reference to something which
+ # has a __del__ method. Setting this to None breaks the cycle
+ # and allows the garbage collector to do its thing normally.
+ self.__callback = None
+
+ # Closing the temporary file releases memory and frees disk space.
+ # Important when caching big files.
+ self.__buf.close()
+
+ def read(self, amt: int | None = None) -> bytes:
+ data: bytes = self.__fp.read(amt)
+ if data:
+ # We may be dealing with b'', a sign that things are over:
+ # it's passed e.g. after we've already closed self.__buf.
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data
+
+ def _safe_read(self, amt: int) -> bytes:
+ data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined]
+ if amt == 2 and data == b"\r\n":
+ # urllib executes this read to toss the CRLF at the end
+ # of the chunk.
+ return data
+
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 0000000..b9d72ca --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py @@ -0,0 +1,154 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import calendar
+import time
+from datetime import datetime, timedelta, timezone
+from email.utils import formatdate, parsedate, parsedate_tz
+from typing import TYPE_CHECKING, Any, Mapping
+
+if TYPE_CHECKING:
+ from pip._vendor.urllib3 import HTTPResponse
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
+ date = date or datetime.now(timezone.utc)
+ return date + delta
+
+
+def datetime_to_header(dt: datetime) -> str:
+ return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic:
+ def warning(self, response: HTTPResponse) -> str | None:
+ """
+ Return a valid 1xx warning header value describing the cache
+ adjustments.
+
+ The response is provided to allow warnings like 113
+ http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+ to explicitly say the response is over 24 hours old.
+ """
+ return '110 - "Response is Stale"'
+
+ def update_headers(self, response: HTTPResponse) -> dict[str, str]:
+ """Update the response headers with any new headers.
+
+ NOTE: This SHOULD always include some Warning header to
+ signify that the response was cached by the client, not
+ by way of the provided headers.
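A concrete way to plug a heuristic in is via the CacheControl session wrapper; for instance the ExpiresAfter heuristic defined below (sketch assumes the standalone cachecontrol and requests packages):

import requests
from cachecontrol import CacheControl
from cachecontrol.heuristics import ExpiresAfter

# Treat every response as cacheable for one day, whatever its headers say.
sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(days=1))
resp = sess.get("https://example.com/")
print(resp.headers.get("expires"), resp.headers.get("warning"))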
+ """ + return {} + + def apply(self, response: HTTPResponse) -> HTTPResponse: + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({"Warning": warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + + def update_headers(self, response: HTTPResponse) -> dict[str, str]: + headers = {} + + if "expires" not in response.headers: + date = parsedate(response.headers["date"]) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc] + headers["expires"] = datetime_to_header(expires) + headers["cache-control"] = "public" + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. + """ + + def __init__(self, **kw: Any) -> None: + self.delta = timedelta(**kw) + + def update_headers(self, response: HTTPResponse) -> dict[str, str]: + expires = expire_after(self.delta) + return {"expires": datetime_to_header(expires), "cache-control": "public"} + + def warning(self, response: HTTPResponse) -> str | None: + tmpl = "110 - Automatically cached for %s. Response might be stale" + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. 
+ """ + + cacheable_by_default_statuses = { + 200, + 203, + 204, + 206, + 300, + 301, + 404, + 405, + 410, + 414, + 501, + } + + def update_headers(self, resp: HTTPResponse) -> dict[str, str]: + headers: Mapping[str, str] = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + last_modified = parsedate(headers["last-modified"]) + if last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp: HTTPResponse) -> str | None: + return None diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..f9e967c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,206 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import io +from typing import IO, TYPE_CHECKING, Any, Mapping, cast + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3 import HTTPResponse + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest + + +class Serializer: + serde_version = "4" + + def dumps( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + ) -> bytes: + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) + + if body is None: + # When a body isn't passed in, we'll read the response. We + # also update the response with a new file handler to be + # sure it acts as though it was never read. 
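+            # (Re-wrapping the consumed data in a BytesIO below leaves
+            # the response readable for any later consumer.)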
+ body = response.read(decode_content=False) + response._fp = io.BytesIO(body) # type: ignore[attr-defined] + response.length_remaining = len(body) + + data = { + "response": { + "body": body, # Empty bytestring if body is stored separately + "headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call] + "status": response.status, + "version": response.version, + "reason": str(response.reason), + "decode_content": response.decode_content, + } + } + + # Construct our vary headers + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers["vary"].split(",") + for header in varied_headers: + header = str(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = str(header_value) + data["vary"][header] = header_value + + return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)]) + + def serialize(self, data: dict[str, Any]) -> bytes: + return cast(bytes, msgpack.dumps(data, use_bin_type=True)) + + def loads( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # Short circuit if we've been given an empty set of data + if not data: + return None + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + verstr = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return] + + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return None + + def prepare_response( + self, + request: PreparedRequest, + cached: Mapping[str, Any], + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. + if "*" in cached.get("vary", {}): + return None + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return None + + body_raw = cached["response"].pop("body") + + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + data=cached["response"]["headers"] + ) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body: IO[bytes] + if body_file is None: + body = io.BytesIO(body_raw) + else: + body = body_file + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. 
A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + # Discard any `strict` parameter serialized by older version of cachecontrol. + cached["response"].pop("strict", None) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v0( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. + return None + + def _loads_v1( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v1" pickled cache format. This is no longer supported + # for security reasons, so we treat it as a miss. + return None + + def _loads_v2( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v2" compressed base64 cache format. + # This has been removed due to age and poor size/performance + # characteristics, so we treat it as a miss. + return None + + def _loads_v3( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: + # Due to Python 2 encoding issues, it's impossible to know for sure + # exactly how to load v3 entries, thus we'll treat these as a miss so + # that they get rewritten out as v4 entries. + return None + + def _loads_v4( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + try: + cached = msgpack.loads(data, raw=False) + except ValueError: + return None + + return self.prepare_response(request, cached, body_file) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..f618bc3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache + +if TYPE_CHECKING: + from pip._vendor import requests + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.controller import CacheController + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer + + +def CacheControl( + sess: requests.Session, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + controller_class: type[CacheController] | None = None, + adapter_class: type[CacheControlAdapter] | None = None, + cacheable_methods: Collection[str] | None = None, +) -> requests.Session: + cache = DictCache() if cache is None else cache + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + 
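+    # A minimal usage sketch (assuming the vendored requests package):
+    #
+    #     sess = CacheControl(requests.Session())
+    #     sess.get("https://example.com/")  # repeat GETs can hit the cache
+    #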
return sess diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py new file mode 100644 index 0000000..8ce89ce --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2023.07.22" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py new file mode 100644 index 0000000..0037634 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c5de656 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..c74c9c9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..e8f7486 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem new file mode 100644 index 0000000..0212369 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem @@ -0,0 +1,4635 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ 
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y 
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: 
d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f 
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: 
O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt 
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 
O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 0000000..5c67600 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,119 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys + +DEBIAN_CA_CERTS_PATH = '/etc/ssl/certs/ca-certificates.crt' + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. 
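+ #
+ # Roughly, what follows boils down to this sketch: files() returns a
+ # Traversable for the package, and as_file() wraps it in a context
+ # manager that yields a real filesystem path:
+ #
+ #     from importlib.resources import as_file, files
+ #     ctx = as_file(files("pip._vendor.certifi").joinpath("cacert.pem"))
+ #     with ctx as path:
+ #         ...  # `path` exists on disk; a temporary copy is extracted
+ #              # (and removed on exit) only when the package lives
+ #              # inside a zip archive.
+ #
+ # Entering the context manager once and never exiting it keeps the
+ # extracted path valid for the life of the process; callers can then
+ # use the result, e.g. ssl.create_default_context(cafile=where()).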
+ global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + + +# Debian: Use system CA certs: +def where() -> str: + return DEBIAN_CA_CERTS_PATH + + +def contents() -> str: + with open(where(), "r", encoding="ascii") as data: + return data.read() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py new file mode 100644 index 0000000..fe58162 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__init__.py @@ -0,0 +1,115 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Union + +from .charsetgroupprober import CharSetGroupProber +from .charsetprober import CharSetProber +from .enums import InputState +from .resultdict import ResultDict +from .universaldetector import UniversalDetector +from .version import VERSION, __version__ + +__all__ = ["UniversalDetector", "detect", "detect_all", "__version__", "VERSION"] + + +def detect( + byte_str: Union[bytes, bytearray], should_rename_legacy: bool = False +) -> ResultDict: + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + :type should_rename_legacy: ``bool`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError( + f"Expected object of type bytes or bytearray, got: {type(byte_str)}" + ) + byte_str = bytearray(byte_str) + detector = UniversalDetector(should_rename_legacy=should_rename_legacy) + detector.feed(byte_str) + return detector.close() + + +def detect_all( + byte_str: Union[bytes, bytearray], + ignore_threshold: bool = False, + should_rename_legacy: bool = False, +) -> List[ResultDict]: + """ + Detect all the possible encodings of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + :param ignore_threshold: Include encodings that are below + ``UniversalDetector.MINIMUM_THRESHOLD`` + in results. + :type ignore_threshold: ``bool`` + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? 
+ :type should_rename_legacy: ``bool`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError( + f"Expected object of type bytes or bytearray, got: {type(byte_str)}" + ) + byte_str = bytearray(byte_str) + + detector = UniversalDetector(should_rename_legacy=should_rename_legacy) + detector.feed(byte_str) + detector.close() + + if detector.input_state == InputState.HIGH_BYTE: + results: List[ResultDict] = [] + probers: List[CharSetProber] = [] + for prober in detector.charset_probers: + if isinstance(prober, CharSetGroupProber): + probers.extend(p for p in prober.probers) + else: + probers.append(prober) + for prober in probers: + if ignore_threshold or prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name or "" + lower_charset_name = charset_name.lower() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith("iso-8859") and detector.has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get( + lower_charset_name, charset_name + ) + # Rename legacy encodings with superset encodings if asked + if should_rename_legacy: + charset_name = detector.LEGACY_MAP.get( + charset_name.lower(), charset_name + ) + results.append( + { + "encoding": charset_name, + "confidence": prober.get_confidence(), + "language": prober.language, + } + ) + if len(results) > 0: + return sorted(results, key=lambda result: -result["confidence"]) + + return [detector.result] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9427bb4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc new file mode 100644 index 0000000..2335296 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc new file mode 100644 index 0000000..23139de Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc new file mode 100644 index 0000000..d0e5c7e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc new file mode 100644 index 0000000..419e1d9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc new file mode 100644 index 0000000..08715a6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc new file mode 100644 index 0000000..c78b88a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc new file mode 100644 index 0000000..217d511 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc new file mode 100644 index 0000000..869018d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc new file mode 100644 index 0000000..db4b2a8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc new file mode 100644 index 0000000..6a87537 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc new file mode 100644 index 0000000..1006b57 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc new file mode 100644 index 0000000..7f4226f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc new file mode 100644 index 0000000..a422b50 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc new file mode 100644 index 
0000000..2384a84 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc new file mode 100644 index 0000000..b5a7c34 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc new file mode 100644 index 0000000..c0791cc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc new file mode 100644 index 0000000..a075209 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc new file mode 100644 index 0000000..cfce07f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc new file mode 100644 index 0000000..e971164 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc new file mode 100644 index 0000000..1a78ceb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc new file mode 100644 index 0000000..25d912d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc new file mode 100644 index 0000000..dcd7671 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc new file mode 100644 index 0000000..1fa952c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc new file mode 100644 index 0000000..a341e84 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc new file mode 100644 index 0000000..34fd9fc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc new file mode 100644 index 0000000..28d08ff Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc new file mode 100644 index 0000000..285d4d4 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc new file mode 100644 index 0000000..e410f81 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc new file mode 100644 index 0000000..180445e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc new file mode 100644 index 0000000..0de2be7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc new file mode 100644 index 0000000..20a7845 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc new file mode 100644 index 0000000..c3a2d20 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc new file mode 100644 index 0000000..cf0d0c9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc new file mode 100644 index 0000000..78f4d5f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc new file mode 100644 index 0000000..fd418ba Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc new file mode 100644 index 0000000..d8071d8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc new file mode 100644 index 0000000..07f1b8c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc new file mode 100644 index 0000000..53b68e0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc new file mode 100644 index 0000000..af30a34 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc new file mode 100644 index 0000000..2fc6753 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc new file mode 100644 index 0000000..a3db8f7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc new file mode 100644 index 0000000..b1a9362 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..76714aa Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py new file mode 100644 index 0000000..87d9f97 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98 +# Random Distribution Ratio = 512/(5401-512) = 0.105 +# +# The typical distribution ratio is about 25% of the ideal one, still much higher than the RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 +# fmt: off +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py new file mode 100644 index 0000000..ef09c60 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import Big5DistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "Big5" + + @property + def language(self) -> str: + return "Chinese" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py new file mode 100644 index 0000000..176cb99 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/chardistribution.py @@ -0,0 +1,261 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Tuple, Union + +from .big5freq import ( + BIG5_CHAR_TO_FREQ_ORDER, + BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO, +) +from .euckrfreq import ( + EUCKR_CHAR_TO_FREQ_ORDER, + EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO, +) +from .euctwfreq import ( + EUCTW_CHAR_TO_FREQ_ORDER, + EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO, +) +from .gb2312freq import ( + GB2312_CHAR_TO_FREQ_ORDER, + GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO, +) +from .jisfreq import ( + JIS_CHAR_TO_FREQ_ORDER, + JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO, +) +from .johabfreq import JOHAB_TO_EUCKR_ORDER_TABLE + + +class CharDistributionAnalysis: + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self) -> None: + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order: Tuple[int, ...] = tuple() + self._table_size = 0 # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. 
See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. + self.typical_distribution_ratio = 0.0 + self._done = False + self._total_chars = 0 + self._freq_chars = 0 + self.reset() + + def reset(self) -> None: + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char: Union[bytes, bytearray], char_len: int) -> None: + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self) -> float: + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = self._freq_chars / ( + (self._total_chars - self._freq_chars) * self.typical_distribution_ratio + ) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self) -> bool: + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, _: Union[bytes, bytearray]) -> int: + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self) -> None: + super().__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self) -> None: + super().__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xB0:
+            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
+        return -1
+
+
+class JOHABDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        first_char = byte_str[0]
+        if 0x88 <= first_char < 0xD4:
+            code = first_char * 256 + byte_str[1]
+            return JOHAB_TO_EUCKR_ORDER_TABLE.get(code, -1)
+        return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+        self._table_size = GB2312_TABLE_SIZE
+        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for GB2312 encoding, we are interested
+        # first byte range: 0xb0 -- 0xfe
+        # second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if (first_char >= 0xB0) and (second_char >= 0xA1):
+            return 94 * (first_char - 0xB0) + second_char - 0xA1
+        return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+        self._table_size = BIG5_TABLE_SIZE
+        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for big5 encoding, we are interested
+        # first byte range: 0xa4 -- 0xfe
+        # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if first_char >= 0xA4:
+            if second_char >= 0xA1:
+                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+            return 157 * (first_char - 0xA4) + second_char - 0x40
+        return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for sjis encoding, we are interested
+        # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+        # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if 0x81 <= first_char <= 0x9F:
+            order = 188 * (first_char - 0x81)
+        elif 0xE0 <= first_char <= 0xEF:
+            order = 188 * (first_char - 0xE0 + 31)
+        else:
+            return -1
+        order = order + second_char - 0x40
+        if second_char > 0x7F:
+            order = -1
+        return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> int:
+        # for euc-JP encoding, we are interested
+        # first byte range: 0xa0 -- 0xfe
+        # second byte range: 0xa1 -- 0xfe
+        # no validation needed here. 
State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xA1 + return -1 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py new file mode 100644 index 0000000..6def56b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Optional, Union + +from .charsetprober import CharSetProber +from .enums import LanguageFilter, ProbingState + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + super().__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers: List[CharSetProber] = [] + self._best_guess_prober: Optional[CharSetProber] = None + + def reset(self) -> None: + super().reset() + self._active_num = 0 + for prober in self.probers: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self) -> Optional[str]: + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self) -> Optional[str]: + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + for prober in self.probers: + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + self._state = ProbingState.FOUND_IT + return self.state + if state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self) -> float: + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + if state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober.active: + self.logger.debug("%s not active", prober.charset_name) + continue + conf = 
prober.get_confidence() + self.logger.debug( + "%s %s confidence = %s", prober.charset_name, prober.language, conf + ) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py new file mode 100644 index 0000000..a103ca1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/charsetprober.py @@ -0,0 +1,147 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re +from typing import Optional, Union + +from .enums import LanguageFilter, ProbingState + +INTERNATIONAL_WORDS_PATTERN = re.compile( + b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?" +) + + +class CharSetProber: + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + self._state = ProbingState.DETECTING + self.active = True + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self) -> None: + self._state = ProbingState.DETECTING + + @property + def charset_name(self) -> Optional[str]: + return None + + @property + def language(self) -> Optional[str]: + raise NotImplementedError + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + raise NotImplementedError + + @property + def state(self) -> ProbingState: + return self._state + + def get_confidence(self) -> float: + return 0.0 + + @staticmethod + def filter_high_byte_only(buf: Union[bytes, bytearray]) -> bytes: + buf = re.sub(b"([\x00-\x7F])+", b" ", buf) + return buf + + @staticmethod + def filter_international_words(buf: Union[bytes, bytearray]) -> bytearray: + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + This filter applies to all scripts which do not use English characters. 
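+
+        Illustrative example (input bytes invented for this note, not
+        taken from the original source): for ``b"foo\xe9t\xe9, bar!"``
+        the words containing high bytes survive while the pure-ASCII
+        ``bar!`` is dropped, giving ``bytearray(b'foo\xe9t\xe9 ')`` with
+        the trailing ``,`` marker collapsed to a single space.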
+ """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = INTERNATIONAL_WORDS_PATTERN.findall(buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b"\x80": + last_char = b" " + filtered.extend(last_char) + + return filtered + + @staticmethod + def remove_xml_tags(buf: Union[bytes, bytearray]) -> bytes: + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. + """ + filtered = bytearray() + in_tag = False + prev = 0 + buf = memoryview(buf).cast("c") + + for curr, buf_char in enumerate(buf): + # Check if we're coming out of or entering an XML tag + + # https://github.com/python/typeshed/issues/8182 + if buf_char == b">": # type: ignore[comparison-overlap] + prev = curr + 1 + in_tag = False + # https://github.com/python/typeshed/issues/8182 + elif buf_char == b"<": # type: ignore[comparison-overlap] + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b" ") + in_tag = True + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d9f7752 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc new file mode 100644 index 0000000..1c5ad9d Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py new file mode 100644 index 0000000..43f6e14 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cli/chardetect.py @@ -0,0 +1,112 @@ +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. 
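+Example (piping via stdin; the encoding and confidence shown here are
+illustrative, not canned output)::
+
+    % cat somefile | chardetect
+    <stdin>: utf-8 with confidence 0.99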
+ +""" + + +import argparse +import sys +from typing import Iterable, List, Optional + +from .. import __version__ +from ..universaldetector import UniversalDetector + + +def description_of( + lines: Iterable[bytes], + name: str = "stdin", + minimal: bool = False, + should_rename_legacy: bool = False, +) -> Optional[str]: + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + :param should_rename_legacy: Should we rename legacy encodings to + their more modern equivalents? + :type should_rename_legacy: ``bool`` + """ + u = UniversalDetector(should_rename_legacy=should_rename_legacy) + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if minimal: + return result["encoding"] + if result["encoding"]: + return f'{name}: {result["encoding"]} with confidence {result["confidence"]}' + return f"{name}: no result" + + +def main(argv: Optional[List[str]] = None) -> None: + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description=( + "Takes one or more file paths and reports their detected encodings" + ) + ) + parser.add_argument( + "input", + help="File whose encoding we would like to determine. (default: stdin)", + type=argparse.FileType("rb"), + nargs="*", + default=[sys.stdin.buffer], + ) + parser.add_argument( + "--minimal", + help="Print only the encoding to standard output", + action="store_true", + ) + parser.add_argument( + "-l", + "--legacy", + help="Rename legacy encodings to more modern ones.", + action="store_true", + ) + parser.add_argument( + "--version", action="version", version=f"%(prog)s {__version__}" + ) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print( + "You are running chardetect interactively. Press " + "CTRL-D twice at the start of a blank line to signal the " + "end of your input. If you want help, run chardetect " + "--help\n", + file=sys.stderr, + ) + print( + description_of( + f, f.name, minimal=args.minimal, should_rename_legacy=args.legacy + ) + ) + + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py new file mode 100644 index 0000000..8ed4a87 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachine.py @@ -0,0 +1,90 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################

+import logging

+from .codingstatemachinedict import CodingStateMachineDict
+from .enums import MachineState


+class CodingStateMachine:
+    """
+    A state machine to verify a byte sequence for a particular encoding. For
+    each byte the detector receives, it will feed that byte to every active
+    state machine available, one byte at a time. The state machine changes its
+    state based on its previous state and the byte it receives. There are 3
+    states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or a legal byte sequence
+                 (i.e. a valid code point) for a character has been identified.
+
+    ME state: This indicates that the state machine identified a byte sequence
+              that is specific to the charset it is designed for and that
+              there is no other possible encoding which can contain this byte
+              sequence. This will lead to an immediate positive answer for
+              the detector.
+
+    ERROR state: This indicates the state machine identified an illegal byte
+                 sequence for that encoding. This will lead to an immediate
+                 negative answer for this encoding. Detector will exclude this
+                 encoding from consideration from here on.
+    """
+
+    def __init__(self, sm: CodingStateMachineDict) -> None:
+        self._model = sm
+        self._curr_byte_pos = 0
+        self._curr_char_len = 0
+        self._curr_state = MachineState.START
+        self.active = True
+        self.logger = logging.getLogger(__name__)
+        self.reset()
+
+    def reset(self) -> None:
+        self._curr_state = MachineState.START
+
+    def next_state(self, c: int) -> int:
+        # for each byte we get its class
+        # if it is first byte, we also get byte length
+        byte_class = self._model["class_table"][c]
+        if self._curr_state == MachineState.START:
+            self._curr_byte_pos = 0
+            self._curr_char_len = self._model["char_len_table"][byte_class]
+        # from byte's class and state_table, we get its next state
+        curr_state = self._curr_state * self._model["class_factor"] + byte_class
+        self._curr_state = self._model["state_table"][curr_state]
+        self._curr_byte_pos += 1
+        return self._curr_state
+
+    def get_current_charlen(self) -> int:
+        return self._curr_char_len
+
+    def get_coding_state_machine(self) -> str:
+        return self._model["name"]
+
+    @property
+    def language(self) -> str:
+        return self._model["language"]
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py
new file mode 100644
index 0000000..7a3c4c7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/codingstatemachinedict.py
@@ -0,0 +1,19 @@
+from typing import TYPE_CHECKING, Tuple

+if TYPE_CHECKING:
+    # TypedDict was introduced in Python 3.8.
+    #
+    # TODO: Remove the else block and TYPE_CHECKING check when dropping support
+    # for Python 3.7.
+    from typing import TypedDict

+    class CodingStateMachineDict(TypedDict, total=False):
+        class_table: Tuple[int, ...]
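+        # Number of byte classes; CodingStateMachine.next_state() uses this
+        # as the row stride when indexing state_table
+        # (curr_state * class_factor + byte_class).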
+ class_factor: int + state_table: Tuple[int, ...] + char_len_table: Tuple[int, ...] + name: str + language: str # Optional key + +else: + CodingStateMachineDict = dict diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py new file mode 100644 index 0000000..fa7307e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "CP949" + + @property + def language(self) -> str: + return "Korean" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py new file mode 100644 index 0000000..5e3e198 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/enums.py @@ -0,0 +1,85 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +from enum import Enum, Flag + + +class InputState: + """ + This enum represents the different states a universal detector can be in. + """ + + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(Flag): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + + NONE = 0x00 + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(Enum): + """ + This enum represents the different states a prober can be in. + """ + + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState: + """ + This enum represents the different states a state machine can be in. 
+ """ + + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood: + """ + This enum represents the likelihood of a character following the previous one. + """ + + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls) -> int: + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory: + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py new file mode 100644 index 0000000..fd71383 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escprober.py @@ -0,0 +1,102 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Optional, Union + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, MachineState, ProbingState +from .escsm import ( + HZ_SM_MODEL, + ISO2022CN_SM_MODEL, + ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL, +) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. 
+ """ + + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + super().__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = 0 + self._detected_charset: Optional[str] = None + self._detected_language: Optional[str] = None + self._state = ProbingState.DETECTING + self.reset() + + def reset(self) -> None: + super().reset() + for coding_sm in self.coding_sm: + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self) -> Optional[str]: + return self._detected_charset + + @property + def language(self) -> Optional[str]: + return self._detected_language + + def get_confidence(self) -> float: + return 0.99 if self._detected_charset else 0.00 + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py new file mode 100644 index 0000000..11d4adf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/escsm.py @@ -0,0 +1,261 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .codingstatemachinedict import CodingStateMachineDict +from .enums import MachineState + +# fmt: off +HZ_CLS = ( + 1, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 0, 0, 0, 0, 0, 0, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 1, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 0, 0, 0, 0, # 20 - 27 + 0, 0, 0, 0, 0, 0, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 0, 0, 0, 0, 0, 0, 0, 0, # 40 - 47 + 0, 0, 0, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 4, 0, 5, 2, 0, # 78 - 7f + 1, 1, 1, 1, 1, 1, 1, 1, # 80 - 87 + 1, 1, 1, 1, 1, 1, 1, 1, # 88 - 8f + 1, 1, 1, 1, 1, 1, 1, 1, # 90 - 97 + 1, 1, 1, 1, 1, 1, 1, 1, # 98 - 9f + 1, 1, 1, 1, 1, 1, 1, 1, # a0 - a7 + 1, 1, 1, 1, 1, 1, 1, 1, # a8 - af + 1, 1, 1, 1, 1, 1, 1, 1, # b0 - b7 + 1, 1, 1, 1, 1, 1, 1, 1, # b8 - bf + 1, 1, 1, 1, 1, 1, 1, 1, # c0 - c7 + 1, 1, 1, 1, 1, 1, 1, 1, # c8 - cf + 1, 1, 1, 1, 1, 1, 1, 1, # d0 - d7 + 1, 1, 1, 1, 1, 1, 1, 1, # d8 - df + 1, 1, 1, 1, 1, 1, 1, 1, # e0 - e7 + 1, 1, 1, 1, 1, 1, 1, 1, # e8 - ef + 1, 1, 1, 1, 1, 1, 1, 1, # f0 - f7 + 1, 1, 1, 1, 1, 1, 1, 1, # f8 - ff +) + +HZ_ST = ( +MachineState.START, MachineState.ERROR, 3, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07 +MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f +MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, 4, MachineState.ERROR, # 10-17 + 5, MachineState.ERROR, 6, MachineState.ERROR, 5, 5, 4, MachineState.ERROR, # 18-1f + 4, MachineState.ERROR, 4, 4, 4, MachineState.ERROR, 4, MachineState.ERROR, # 20-27 + 4, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 28-2f +) +# fmt: on + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL: CodingStateMachineDict = { + "class_table": HZ_CLS, + "class_factor": 6, + "state_table": HZ_ST, + "char_len_table": HZ_CHAR_LEN_TABLE, + "name": "HZ-GB-2312", + "language": "Chinese", +} + +# fmt: off +ISO2022CN_CLS = ( + 2, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 0, 0, 0, 0, 0, 0, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 1, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 0, 0, 0, 0, # 20 - 27 + 0, 3, 0, 0, 0, 0, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 0, 0, 0, 4, 0, 0, 0, 0, # 40 - 47 + 0, 0, 0, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 0, 0, 0, 0, 0, # 78 - 7f + 2, 2, 2, 2, 2, 2, 2, 2, # 80 - 87 + 2, 2, 2, 2, 2, 2, 2, 2, # 88 - 8f + 2, 2, 2, 2, 2, 2, 2, 2, # 90 - 97 + 2, 2, 2, 2, 2, 2, 2, 2, # 98 - 9f + 2, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 2, 2, 2, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 
2, 2, 2, 2, # c0 - c7 + 2, 2, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 2, 2, 2, 2, 2, 2, 2, 2, # e0 - e7 + 2, 2, 2, 2, 2, 2, 2, 2, # e8 - ef + 2, 2, 2, 2, 2, 2, 2, 2, # f0 - f7 + 2, 2, 2, 2, 2, 2, 2, 2, # f8 - ff +) + +ISO2022CN_ST = ( + MachineState.START, 3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07 + MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f + MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17 + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 4, MachineState.ERROR, # 18-1f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 20-27 + 5, 6, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 28-2f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 30-37 + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, # 38-3f +) +# fmt: on + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL: CodingStateMachineDict = { + "class_table": ISO2022CN_CLS, + "class_factor": 9, + "state_table": ISO2022CN_ST, + "char_len_table": ISO2022CN_CHAR_LEN_TABLE, + "name": "ISO-2022-CN", + "language": "Chinese", +} + +# fmt: off +ISO2022JP_CLS = ( + 2, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 0, 0, 0, 0, 2, 2, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 1, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 7, 0, 0, 0, # 20 - 27 + 3, 0, 0, 0, 0, 0, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 6, 0, 4, 0, 8, 0, 0, 0, # 40 - 47 + 0, 9, 5, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 0, 0, 0, 0, 0, # 78 - 7f + 2, 2, 2, 2, 2, 2, 2, 2, # 80 - 87 + 2, 2, 2, 2, 2, 2, 2, 2, # 88 - 8f + 2, 2, 2, 2, 2, 2, 2, 2, # 90 - 97 + 2, 2, 2, 2, 2, 2, 2, 2, # 98 - 9f + 2, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 2, 2, 2, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 2, 2, 2, 2, # c0 - c7 + 2, 2, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 2, 2, 2, 2, 2, 2, 2, 2, # e0 - e7 + 2, 2, 2, 2, 2, 2, 2, 2, # e8 - ef + 2, 2, 2, 2, 2, 2, 2, 2, # f0 - f7 + 2, 2, 2, 2, 2, 2, 2, 2, # f8 - ff +) + +ISO2022JP_ST = ( + MachineState.START, 3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07 + MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, 
MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17 + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, # 18-1f + MachineState.ERROR, 5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 4, MachineState.ERROR, MachineState.ERROR, # 20-27 + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 6, MachineState.ITS_ME, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, # 28-2f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, # 30-37 + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 38-3f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, MachineState.START, # 40-47 +) +# fmt: on + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL: CodingStateMachineDict = { + "class_table": ISO2022JP_CLS, + "class_factor": 10, + "state_table": ISO2022JP_ST, + "char_len_table": ISO2022JP_CHAR_LEN_TABLE, + "name": "ISO-2022-JP", + "language": "Japanese", +} + +# fmt: off +ISO2022KR_CLS = ( + 2, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 0, 0, 0, 0, 0, 0, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 1, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 3, 0, 0, 0, # 20 - 27 + 0, 4, 0, 0, 0, 0, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 0, 0, 0, 5, 0, 0, 0, 0, # 40 - 47 + 0, 0, 0, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 0, 0, 0, 0, 0, # 78 - 7f + 2, 2, 2, 2, 2, 2, 2, 2, # 80 - 87 + 2, 2, 2, 2, 2, 2, 2, 2, # 88 - 8f + 2, 2, 2, 2, 2, 2, 2, 2, # 90 - 97 + 2, 2, 2, 2, 2, 2, 2, 2, # 98 - 9f + 2, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 2, 2, 2, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 2, 2, 2, 2, # c0 - c7 + 2, 2, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 2, 2, 2, 2, 2, 2, 2, 2, # e0 - e7 + 2, 2, 2, 2, 2, 2, 2, 2, # e8 - ef + 2, 2, 2, 2, 2, 2, 2, 2, # f0 - f7 + 2, 2, 2, 2, 2, 2, 2, 2, # f8 - ff +) + +ISO2022KR_ST = ( + MachineState.START, 3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07 + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 4, MachineState.ERROR, MachineState.ERROR, # 10-17 + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 18-1f + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 20-27 +) +# fmt: on + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL: CodingStateMachineDict = { + "class_table": ISO2022KR_CLS, + "class_factor": 6, + 
"state_table": ISO2022KR_ST, + "char_len_table": ISO2022KR_CHAR_LEN_TABLE, + "name": "ISO-2022-KR", + "language": "Korean", +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py new file mode 100644 index 0000000..39487f4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/eucjpprober.py @@ -0,0 +1,102 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Union + +from .chardistribution import EUCJPDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .enums import MachineState, ProbingState +from .jpcntx import EUCJPContextAnalysis +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self) -> None: + super().reset() + self.context_analyzer.reset() + + @property + def charset_name(self) -> str: + return "EUC-JP" + + @property + def language(self) -> str: + return "Japanese" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None + + for i, byte in enumerate(byte_str): + # PY3K: byte_str is a byte array, so byte is an int, not a byte + coding_state = self.coding_sm.next_state(byte) + if coding_state == MachineState.ERROR: + self.logger.debug( + "%s %s prober hit error at byte %s", + self.charset_name, + self.language, + i, + ) + self._state = ProbingState.NOT_ME + break + if coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + if coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1 : i + 1], char_len) + self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if self.context_analyzer.got_enough_data() and ( + 
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py
new file mode 100644
index 0000000..7dc3b10
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrfreq.py
@@ -0,0 +1,196 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M of text material, including literature and computer technology
+
+# 128  --> 0.79
+# 256  --> 0.92
+# 512  --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
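Spelled out, the numbers above are the entire detection heuristic: the 512 most frequent characters cover 98.653% of the sampled text, so genuinely Korean text shows a frequent-to-rare character ratio near 0.98653 / (1 - 0.98653), while bytes drawn uniformly from the 2350-character repertoire would only reach 512 / (2350 - 512). A standalone check of the arithmetic (variable names are illustrative):

    coverage = 0.98653             # share of the sample covered by the top 512 chars
    table_size, cutoff = 2350, 512

    ideal_ratio = coverage / (1 - coverage)        # ~73.24 for real EUC-KR text
    random_ratio = cutoff / (table_size - cutoff)  # ~0.279 for unskewed noise

    print(round(ideal_ratio, 2), round(random_ratio, 3))  # 73.24 0.279

The EUCKR_TYPICAL_DISTRIBUTION_RATIO of 6.0 defined just below sits far under the ideal value but comfortably above the random one; the distribution analyzer uses it to normalize the ratio it actually measures into a confidence score. The EUC-TW and GB2312 frequency tables later in this diff repeat the same construction with their own sampling numbers.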
+# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +# fmt: off +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 
940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 
396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, 
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py new file mode 100644 index 0000000..1fc5de0 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "EUC-KR" + + @property + def language(self) -> str: + return "Korean" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py new file mode 100644 index 0000000..4900ccc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwfreq.py @@ -0,0 +1,388 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table +EUCTW_TABLE_SIZE = 5376 + +# fmt: off +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1, 1800, 1506, 255, 1431, 198, 9, 82, 6, 7310, 177, 202, 3615, 1256, 2808, 110, # 2742 + 3735, 33, 3241, 261, 76, 44, 2113, 16, 2931, 2184, 1176, 659, 3868, 26, 3404, 2643, # 2758 + 1198, 3869, 3313, 4060, 410, 2211, 302, 590, 361, 1963, 8, 204, 58, 4296, 7311, 1931, # 2774 + 63, 7312, 7313, 317, 1614, 75, 222, 159, 4061, 2412, 1480, 7314, 3500, 3068, 224, 2809, # 2790 + 3616, 3, 10, 3870, 1471, 29, 2774, 1135, 2852, 1939, 873, 130, 3242, 1123, 312, 7315, # 2806 + 4297, 2051, 507, 252, 682, 7316, 142, 1914, 124, 206, 2932, 34, 3501, 3173, 64, 604, # 2822 + 7317, 2494, 1976, 1977, 155, 1990, 645, 641, 1606, 7318, 3405, 337, 72, 406, 7319, 80, # 2838 + 630, 238, 3174, 1509, 263, 939, 1092, 2644, 756, 1440, 1094, 3406, 449, 69, 2969, 591, # 2854 + 179, 2095, 471, 115, 2034, 1843, 60, 50, 2970, 134, 806, 1868, 734, 2035, 3407, 180, # 2870 + 995, 1607, 156, 537, 2893, 688, 7320, 319, 1305, 779, 2144, 514, 2374, 298, 4298, 359, # 2886 + 2495, 90, 2707, 1338, 663, 11, 906, 1099, 2545, 20, 2436, 182, 532, 1716, 7321, 732, # 2902 + 1376, 4062, 1311, 1420, 3175, 25, 2312, 1056, 113, 399, 382, 1949, 242, 3408, 2467, 529, # 2918 + 3243, 475, 1447, 3617, 7322, 117, 21, 656, 810, 1297, 2295, 2329, 3502, 7323, 126, 4063, # 2934 + 706, 456, 150, 613, 4299, 71, 1118, 2036, 4064, 145, 3069, 85, 835, 486, 2114, 1246, # 2950 + 1426, 428, 727, 1285, 1015, 800, 106, 623, 303, 1281, 7324, 2127, 2354, 347, 3736, 221, # 2966 + 3503, 3110, 7325, 1955, 1153, 4065, 83, 296, 1199, 3070, 192, 624, 93, 7326, 822, 1897, # 2982 + 2810, 3111, 795, 2064, 991, 1554, 1542, 1592, 27, 43, 2853, 859, 139, 1456, 860, 4300, # 2998 + 437, 712, 3871, 164, 2392, 3112, 695, 211, 3017, 2096, 195, 3872, 1608, 3504, 3505, 3618, # 3014 + 3873, 234, 811, 2971, 2097, 3874, 2229, 1441, 3506, 1615, 2375, 668, 2076, 1638, 305, 228, # 3030 + 1664, 4301, 467, 415, 7327, 262, 2098, 1593, 239, 108, 300, 200, 1033, 512, 1247, 2077, # 3046 + 7328, 7329, 2173, 3176, 3619, 2673, 593, 845, 1062, 3244, 88, 1723, 2037, 3875, 1950, 212, # 3062 + 266, 152, 149, 468, 1898, 4066, 4302, 77, 187, 7330, 3018, 37, 5, 2972, 7331, 3876, # 3078 + 7332, 7333, 39, 2517, 4303, 2894, 3177, 2078, 55, 148, 74, 4304, 545, 483, 1474, 1029, # 3094 + 1665, 217, 1869, 1531, 3113, 1104, 2645, 4067, 24, 172, 3507, 900, 3877, 3508, 3509, 4305, # 3110 + 32, 1408, 2811, 1312, 329, 487, 2355, 2247, 2708, 784, 2674, 4, 3019, 3314, 1427, 1788, # 3126 + 188, 109, 499, 7334, 3620, 1717, 1789, 888, 1217, 3020, 4306, 7335, 3510, 7336, 3315, 1520, # 3142 + 3621, 3878, 196, 1034, 775, 7337, 7338, 929, 1815, 249, 439, 38, 7339, 1063, 7340, 794, # 3158 + 3879, 1435, 2296, 46, 178, 3245, 2065, 7341, 2376, 7342, 214, 1709, 4307, 804, 35, 707, # 3174 + 324, 3622, 1601, 2546, 140, 
459, 4068, 7343, 7344, 1365, 839, 272, 978, 2257, 2572, 3409, # 3190 + 2128, 1363, 3623, 1423, 697, 100, 3071, 48, 70, 1231, 495, 3114, 2193, 7345, 1294, 7346, # 3206 + 2079, 462, 586, 1042, 3246, 853, 256, 988, 185, 2377, 3410, 1698, 434, 1084, 7347, 3411, # 3222 + 314, 2615, 2775, 4308, 2330, 2331, 569, 2280, 637, 1816, 2518, 757, 1162, 1878, 1616, 3412, # 3238 + 287, 1577, 2115, 768, 4309, 1671, 2854, 3511, 2519, 1321, 3737, 909, 2413, 7348, 4069, 933, # 3254 + 3738, 7349, 2052, 2356, 1222, 4310, 765, 2414, 1322, 786, 4311, 7350, 1919, 1462, 1677, 2895, # 3270 + 1699, 7351, 4312, 1424, 2437, 3115, 3624, 2590, 3316, 1774, 1940, 3413, 3880, 4070, 309, 1369, # 3286 + 1130, 2812, 364, 2230, 1653, 1299, 3881, 3512, 3882, 3883, 2646, 525, 1085, 3021, 902, 2000, # 3302 + 1475, 964, 4313, 421, 1844, 1415, 1057, 2281, 940, 1364, 3116, 376, 4314, 4315, 1381, 7, # 3318 + 2520, 983, 2378, 336, 1710, 2675, 1845, 321, 3414, 559, 1131, 3022, 2742, 1808, 1132, 1313, # 3334 + 265, 1481, 1857, 7352, 352, 1203, 2813, 3247, 167, 1089, 420, 2814, 776, 792, 1724, 3513, # 3350 + 4071, 2438, 3248, 7353, 4072, 7354, 446, 229, 333, 2743, 901, 3739, 1200, 1557, 4316, 2647, # 3366 + 1920, 395, 2744, 2676, 3740, 4073, 1835, 125, 916, 3178, 2616, 4317, 7355, 7356, 3741, 7357, # 3382 + 7358, 7359, 4318, 3117, 3625, 1133, 2547, 1757, 3415, 1510, 2313, 1409, 3514, 7360, 2145, 438, # 3398 + 2591, 2896, 2379, 3317, 1068, 958, 3023, 461, 311, 2855, 2677, 4074, 1915, 3179, 4075, 1978, # 3414 + 383, 750, 2745, 2617, 4076, 274, 539, 385, 1278, 1442, 7361, 1154, 1964, 384, 561, 210, # 3430 + 98, 1295, 2548, 3515, 7362, 1711, 2415, 1482, 3416, 3884, 2897, 1257, 129, 7363, 3742, 642, # 3446 + 523, 2776, 2777, 2648, 7364, 141, 2231, 1333, 68, 176, 441, 876, 907, 4077, 603, 2592, # 3462 + 710, 171, 3417, 404, 549, 18, 3118, 2393, 1410, 3626, 1666, 7365, 3516, 4319, 2898, 4320, # 3478 + 7366, 2973, 368, 7367, 146, 366, 99, 871, 3627, 1543, 748, 807, 1586, 1185, 22, 2258, # 3494 + 379, 3743, 3180, 7368, 3181, 505, 1941, 2618, 1991, 1382, 2314, 7369, 380, 2357, 218, 702, # 3510 + 1817, 1248, 3418, 3024, 3517, 3318, 3249, 7370, 2974, 3628, 930, 3250, 3744, 7371, 59, 7372, # 3526 + 585, 601, 4078, 497, 3419, 1112, 1314, 4321, 1801, 7373, 1223, 1472, 2174, 7374, 749, 1836, # 3542 + 690, 1899, 3745, 1772, 3885, 1476, 429, 1043, 1790, 2232, 2116, 917, 4079, 447, 1086, 1629, # 3558 + 7375, 556, 7376, 7377, 2020, 1654, 844, 1090, 105, 550, 966, 1758, 2815, 1008, 1782, 686, # 3574 + 1095, 7378, 2282, 793, 1602, 7379, 3518, 2593, 4322, 4080, 2933, 2297, 4323, 3746, 980, 2496, # 3590 + 544, 353, 527, 4324, 908, 2678, 2899, 7380, 381, 2619, 1942, 1348, 7381, 1341, 1252, 560, # 3606 + 3072, 7382, 3420, 2856, 7383, 2053, 973, 886, 2080, 143, 4325, 7384, 7385, 157, 3886, 496, # 3622 + 4081, 57, 840, 540, 2038, 4326, 4327, 3421, 2117, 1445, 970, 2259, 1748, 1965, 2081, 4082, # 3638 + 3119, 1234, 1775, 3251, 2816, 3629, 773, 1206, 2129, 1066, 2039, 1326, 3887, 1738, 1725, 4083, # 3654 + 279, 3120, 51, 1544, 2594, 423, 1578, 2130, 2066, 173, 4328, 1879, 7386, 7387, 1583, 264, # 3670 + 610, 3630, 4329, 2439, 280, 154, 7388, 7389, 7390, 1739, 338, 1282, 3073, 693, 2857, 1411, # 3686 + 1074, 3747, 2440, 7391, 4330, 7392, 7393, 1240, 952, 2394, 7394, 2900, 1538, 2679, 685, 1483, # 3702 + 4084, 2468, 1436, 953, 4085, 2054, 4331, 671, 2395, 79, 4086, 2441, 3252, 608, 567, 2680, # 3718 + 3422, 4087, 4088, 1691, 393, 1261, 1791, 2396, 7395, 4332, 7396, 7397, 7398, 7399, 1383, 1672, # 3734 + 3748, 3182, 1464, 522, 1119, 661, 1150, 216, 675, 4333, 3888, 1432, 
3519, 609, 4334, 2681, # 3750 + 2397, 7400, 7401, 7402, 4089, 3025, 0, 7403, 2469, 315, 231, 2442, 301, 3319, 4335, 2380, # 3766 + 7404, 233, 4090, 3631, 1818, 4336, 4337, 7405, 96, 1776, 1315, 2082, 7406, 257, 7407, 1809, # 3782 + 3632, 2709, 1139, 1819, 4091, 2021, 1124, 2163, 2778, 1777, 2649, 7408, 3074, 363, 1655, 3183, # 3798 + 7409, 2975, 7410, 7411, 7412, 3889, 1567, 3890, 718, 103, 3184, 849, 1443, 341, 3320, 2934, # 3814 + 1484, 7413, 1712, 127, 67, 339, 4092, 2398, 679, 1412, 821, 7414, 7415, 834, 738, 351, # 3830 + 2976, 2146, 846, 235, 1497, 1880, 418, 1992, 3749, 2710, 186, 1100, 2147, 2746, 3520, 1545, # 3846 + 1355, 2935, 2858, 1377, 583, 3891, 4093, 2573, 2977, 7416, 1298, 3633, 1078, 2549, 3634, 2358, # 3862 + 78, 3750, 3751, 267, 1289, 2099, 2001, 1594, 4094, 348, 369, 1274, 2194, 2175, 1837, 4338, # 3878 + 1820, 2817, 3635, 2747, 2283, 2002, 4339, 2936, 2748, 144, 3321, 882, 4340, 3892, 2749, 3423, # 3894 + 4341, 2901, 7417, 4095, 1726, 320, 7418, 3893, 3026, 788, 2978, 7419, 2818, 1773, 1327, 2859, # 3910 + 3894, 2819, 7420, 1306, 4342, 2003, 1700, 3752, 3521, 2359, 2650, 787, 2022, 506, 824, 3636, # 3926 + 534, 323, 4343, 1044, 3322, 2023, 1900, 946, 3424, 7421, 1778, 1500, 1678, 7422, 1881, 4344, # 3942 + 165, 243, 4345, 3637, 2521, 123, 683, 4096, 764, 4346, 36, 3895, 1792, 589, 2902, 816, # 3958 + 626, 1667, 3027, 2233, 1639, 1555, 1622, 3753, 3896, 7423, 3897, 2860, 1370, 1228, 1932, 891, # 3974 + 2083, 2903, 304, 4097, 7424, 292, 2979, 2711, 3522, 691, 2100, 4098, 1115, 4347, 118, 662, # 3990 + 7425, 611, 1156, 854, 2381, 1316, 2861, 2, 386, 515, 2904, 7426, 7427, 3253, 868, 2234, # 4006 + 1486, 855, 2651, 785, 2212, 3028, 7428, 1040, 3185, 3523, 7429, 3121, 448, 7430, 1525, 7431, # 4022 + 2164, 4348, 7432, 3754, 7433, 4099, 2820, 3524, 3122, 503, 818, 3898, 3123, 1568, 814, 676, # 4038 + 1444, 306, 1749, 7434, 3755, 1416, 1030, 197, 1428, 805, 2821, 1501, 4349, 7435, 7436, 7437, # 4054 + 1993, 7438, 4350, 7439, 7440, 2195, 13, 2779, 3638, 2980, 3124, 1229, 1916, 7441, 3756, 2131, # 4070 + 7442, 4100, 4351, 2399, 3525, 7443, 2213, 1511, 1727, 1120, 7444, 7445, 646, 3757, 2443, 307, # 4086 + 7446, 7447, 1595, 3186, 7448, 7449, 7450, 3639, 1113, 1356, 3899, 1465, 2522, 2523, 7451, 519, # 4102 + 7452, 128, 2132, 92, 2284, 1979, 7453, 3900, 1512, 342, 3125, 2196, 7454, 2780, 2214, 1980, # 4118 + 3323, 7455, 290, 1656, 1317, 789, 827, 2360, 7456, 3758, 4352, 562, 581, 3901, 7457, 401, # 4134 + 4353, 2248, 94, 4354, 1399, 2781, 7458, 1463, 2024, 4355, 3187, 1943, 7459, 828, 1105, 4101, # 4150 + 1262, 1394, 7460, 4102, 605, 4356, 7461, 1783, 2862, 7462, 2822, 819, 2101, 578, 2197, 2937, # 4166 + 7463, 1502, 436, 3254, 4103, 3255, 2823, 3902, 2905, 3425, 3426, 7464, 2712, 2315, 7465, 7466, # 4182 + 2332, 2067, 23, 4357, 193, 826, 3759, 2102, 699, 1630, 4104, 3075, 390, 1793, 1064, 3526, # 4198 + 7467, 1579, 3076, 3077, 1400, 7468, 4105, 1838, 1640, 2863, 7469, 4358, 4359, 137, 4106, 598, # 4214 + 3078, 1966, 780, 104, 974, 2938, 7470, 278, 899, 253, 402, 572, 504, 493, 1339, 7471, # 4230 + 3903, 1275, 4360, 2574, 2550, 7472, 3640, 3029, 3079, 2249, 565, 1334, 2713, 863, 41, 7473, # 4246 + 7474, 4361, 7475, 1657, 2333, 19, 463, 2750, 4107, 606, 7476, 2981, 3256, 1087, 2084, 1323, # 4262 + 2652, 2982, 7477, 1631, 1623, 1750, 4108, 2682, 7478, 2864, 791, 2714, 2653, 2334, 232, 2416, # 4278 + 7479, 2983, 1498, 7480, 2654, 2620, 755, 1366, 3641, 3257, 3126, 2025, 1609, 119, 1917, 3427, # 4294 + 862, 1026, 4109, 7481, 3904, 3760, 4362, 3905, 4363, 2260, 1951, 2470, 7482, 1125, 
817, 4110, # 4310 + 4111, 3906, 1513, 1766, 2040, 1487, 4112, 3030, 3258, 2824, 3761, 3127, 7483, 7484, 1507, 7485, # 4326 + 2683, 733, 40, 1632, 1106, 2865, 345, 4113, 841, 2524, 230, 4364, 2984, 1846, 3259, 3428, # 4342 + 7486, 1263, 986, 3429, 7487, 735, 879, 254, 1137, 857, 622, 1300, 1180, 1388, 1562, 3907, # 4358 + 3908, 2939, 967, 2751, 2655, 1349, 592, 2133, 1692, 3324, 2985, 1994, 4114, 1679, 3909, 1901, # 4374 + 2185, 7488, 739, 3642, 2715, 1296, 1290, 7489, 4115, 2198, 2199, 1921, 1563, 2595, 2551, 1870, # 4390 + 2752, 2986, 7490, 435, 7491, 343, 1108, 596, 17, 1751, 4365, 2235, 3430, 3643, 7492, 4366, # 4406 + 294, 3527, 2940, 1693, 477, 979, 281, 2041, 3528, 643, 2042, 3644, 2621, 2782, 2261, 1031, # 4422 + 2335, 2134, 2298, 3529, 4367, 367, 1249, 2552, 7493, 3530, 7494, 4368, 1283, 3325, 2004, 240, # 4438 + 1762, 3326, 4369, 4370, 836, 1069, 3128, 474, 7495, 2148, 2525, 268, 3531, 7496, 3188, 1521, # 4454 + 1284, 7497, 1658, 1546, 4116, 7498, 3532, 3533, 7499, 4117, 3327, 2684, 1685, 4118, 961, 1673, # 4470 + 2622, 190, 2005, 2200, 3762, 4371, 4372, 7500, 570, 2497, 3645, 1490, 7501, 4373, 2623, 3260, # 4486 + 1956, 4374, 584, 1514, 396, 1045, 1944, 7502, 4375, 1967, 2444, 7503, 7504, 4376, 3910, 619, # 4502 + 7505, 3129, 3261, 215, 2006, 2783, 2553, 3189, 4377, 3190, 4378, 763, 4119, 3763, 4379, 7506, # 4518 + 7507, 1957, 1767, 2941, 3328, 3646, 1174, 452, 1477, 4380, 3329, 3130, 7508, 2825, 1253, 2382, # 4534 + 2186, 1091, 2285, 4120, 492, 7509, 638, 1169, 1824, 2135, 1752, 3911, 648, 926, 1021, 1324, # 4550 + 4381, 520, 4382, 997, 847, 1007, 892, 4383, 3764, 2262, 1871, 3647, 7510, 2400, 1784, 4384, # 4566 + 1952, 2942, 3080, 3191, 1728, 4121, 2043, 3648, 4385, 2007, 1701, 3131, 1551, 30, 2263, 4122, # 4582 + 7511, 2026, 4386, 3534, 7512, 501, 7513, 4123, 594, 3431, 2165, 1821, 3535, 3432, 3536, 3192, # 4598 + 829, 2826, 4124, 7514, 1680, 3132, 1225, 4125, 7515, 3262, 4387, 4126, 3133, 2336, 7516, 4388, # 4614 + 4127, 7517, 3912, 3913, 7518, 1847, 2383, 2596, 3330, 7519, 4389, 374, 3914, 652, 4128, 4129, # 4630 + 375, 1140, 798, 7520, 7521, 7522, 2361, 4390, 2264, 546, 1659, 138, 3031, 2445, 4391, 7523, # 4646 + 2250, 612, 1848, 910, 796, 3765, 1740, 1371, 825, 3766, 3767, 7524, 2906, 2554, 7525, 692, # 4662 + 444, 3032, 2624, 801, 4392, 4130, 7526, 1491, 244, 1053, 3033, 4131, 4132, 340, 7527, 3915, # 4678 + 1041, 2987, 293, 1168, 87, 1357, 7528, 1539, 959, 7529, 2236, 721, 694, 4133, 3768, 219, # 4694 + 1478, 644, 1417, 3331, 2656, 1413, 1401, 1335, 1389, 3916, 7530, 7531, 2988, 2362, 3134, 1825, # 4710 + 730, 1515, 184, 2827, 66, 4393, 7532, 1660, 2943, 246, 3332, 378, 1457, 226, 3433, 975, # 4726 + 3917, 2944, 1264, 3537, 674, 696, 7533, 163, 7534, 1141, 2417, 2166, 713, 3538, 3333, 4394, # 4742 + 3918, 7535, 7536, 1186, 15, 7537, 1079, 1070, 7538, 1522, 3193, 3539, 276, 1050, 2716, 758, # 4758 + 1126, 653, 2945, 3263, 7539, 2337, 889, 3540, 3919, 3081, 2989, 903, 1250, 4395, 3920, 3434, # 4774 + 3541, 1342, 1681, 1718, 766, 3264, 286, 89, 2946, 3649, 7540, 1713, 7541, 2597, 3334, 2990, # 4790 + 7542, 2947, 2215, 3194, 2866, 7543, 4396, 2498, 2526, 181, 387, 1075, 3921, 731, 2187, 3335, # 4806 + 7544, 3265, 310, 313, 3435, 2299, 770, 4134, 54, 3034, 189, 4397, 3082, 3769, 3922, 7545, # 4822 + 1230, 1617, 1849, 355, 3542, 4135, 4398, 3336, 111, 4136, 3650, 1350, 3135, 3436, 3035, 4137, # 4838 + 2149, 3266, 3543, 7546, 2784, 3923, 3924, 2991, 722, 2008, 7547, 1071, 247, 1207, 2338, 2471, # 4854 + 1378, 4399, 2009, 864, 1437, 1214, 4400, 373, 3770, 1142, 2216, 667, 4401, 
442, 2753, 2555, # 4870 + 3771, 3925, 1968, 4138, 3267, 1839, 837, 170, 1107, 934, 1336, 1882, 7548, 7549, 2118, 4139, # 4886 + 2828, 743, 1569, 7550, 4402, 4140, 582, 2384, 1418, 3437, 7551, 1802, 7552, 357, 1395, 1729, # 4902 + 3651, 3268, 2418, 1564, 2237, 7553, 3083, 3772, 1633, 4403, 1114, 2085, 4141, 1532, 7554, 482, # 4918 + 2446, 4404, 7555, 7556, 1492, 833, 1466, 7557, 2717, 3544, 1641, 2829, 7558, 1526, 1272, 3652, # 4934 + 4142, 1686, 1794, 416, 2556, 1902, 1953, 1803, 7559, 3773, 2785, 3774, 1159, 2316, 7560, 2867, # 4950 + 4405, 1610, 1584, 3036, 2419, 2754, 443, 3269, 1163, 3136, 7561, 7562, 3926, 7563, 4143, 2499, # 4966 + 3037, 4406, 3927, 3137, 2103, 1647, 3545, 2010, 1872, 4144, 7564, 4145, 431, 3438, 7565, 250, # 4982 + 97, 81, 4146, 7566, 1648, 1850, 1558, 160, 848, 7567, 866, 740, 1694, 7568, 2201, 2830, # 4998 + 3195, 4147, 4407, 3653, 1687, 950, 2472, 426, 469, 3196, 3654, 3655, 3928, 7569, 7570, 1188, # 5014 + 424, 1995, 861, 3546, 4148, 3775, 2202, 2685, 168, 1235, 3547, 4149, 7571, 2086, 1674, 4408, # 5030 + 3337, 3270, 220, 2557, 1009, 7572, 3776, 670, 2992, 332, 1208, 717, 7573, 7574, 3548, 2447, # 5046 + 3929, 3338, 7575, 513, 7576, 1209, 2868, 3339, 3138, 4409, 1080, 7577, 7578, 7579, 7580, 2527, # 5062 + 3656, 3549, 815, 1587, 3930, 3931, 7581, 3550, 3439, 3777, 1254, 4410, 1328, 3038, 1390, 3932, # 5078 + 1741, 3933, 3778, 3934, 7582, 236, 3779, 2448, 3271, 7583, 7584, 3657, 3780, 1273, 3781, 4411, # 5094 + 7585, 308, 7586, 4412, 245, 4413, 1851, 2473, 1307, 2575, 430, 715, 2136, 2449, 7587, 270, # 5110 + 199, 2869, 3935, 7588, 3551, 2718, 1753, 761, 1754, 725, 1661, 1840, 4414, 3440, 3658, 7589, # 5126 + 7590, 587, 14, 3272, 227, 2598, 326, 480, 2265, 943, 2755, 3552, 291, 650, 1883, 7591, # 5142 + 1702, 1226, 102, 1547, 62, 3441, 904, 4415, 3442, 1164, 4150, 7592, 7593, 1224, 1548, 2756, # 5158 + 391, 498, 1493, 7594, 1386, 1419, 7595, 2055, 1177, 4416, 813, 880, 1081, 2363, 566, 1145, # 5174 + 4417, 2286, 1001, 1035, 2558, 2599, 2238, 394, 1286, 7596, 7597, 2068, 7598, 86, 1494, 1730, # 5190 + 3936, 491, 1588, 745, 897, 2948, 843, 3340, 3937, 2757, 2870, 3273, 1768, 998, 2217, 2069, # 5206 + 397, 1826, 1195, 1969, 3659, 2993, 3341, 284, 7599, 3782, 2500, 2137, 2119, 1903, 7600, 3938, # 5222 + 2150, 3939, 4151, 1036, 3443, 1904, 114, 2559, 4152, 209, 1527, 7601, 7602, 2949, 2831, 2625, # 5238 + 2385, 2719, 3139, 812, 2560, 7603, 3274, 7604, 1559, 737, 1884, 3660, 1210, 885, 28, 2686, # 5254 + 3553, 3783, 7605, 4153, 1004, 1779, 4418, 7606, 346, 1981, 2218, 2687, 4419, 3784, 1742, 797, # 5270 + 1642, 3940, 1933, 1072, 1384, 2151, 896, 3941, 3275, 3661, 3197, 2871, 3554, 7607, 2561, 1958, # 5286 + 4420, 2450, 1785, 7608, 7609, 7610, 3942, 4154, 1005, 1308, 3662, 4155, 2720, 4421, 4422, 1528, # 5302 + 2600, 161, 1178, 4156, 1982, 987, 4423, 1101, 4157, 631, 3943, 1157, 3198, 2420, 1343, 1241, # 5318 + 1016, 2239, 2562, 372, 877, 2339, 2501, 1160, 555, 1934, 911, 3944, 7611, 466, 1170, 169, # 5334 + 1051, 2907, 2688, 3663, 2474, 2994, 1182, 2011, 2563, 1251, 2626, 7612, 992, 2340, 3444, 1540, # 5350 + 2721, 1201, 2070, 2401, 1996, 2475, 7613, 4424, 528, 1922, 2188, 1503, 1873, 1570, 2364, 3342, # 5366 + 3276, 7614, 557, 1073, 7615, 1827, 3445, 2087, 2266, 3140, 3039, 3084, 767, 3085, 2786, 4425, # 5382 + 1006, 4158, 4426, 2341, 1267, 2176, 3664, 3199, 778, 3945, 3200, 2722, 1597, 2657, 7616, 4427, # 5398 + 7617, 3446, 7618, 7619, 7620, 3277, 2689, 1433, 3278, 131, 95, 1504, 3946, 723, 4159, 3141, # 5414 + 1841, 3555, 2758, 2189, 3947, 2027, 2104, 3665, 7621, 
2995, 3948, 1218, 7622, 3343, 3201, 3949, # 5430 + 4160, 2576, 248, 1634, 3785, 912, 7623, 2832, 3666, 3040, 3786, 654, 53, 7624, 2996, 7625, # 5446 + 1688, 4428, 777, 3447, 1032, 3950, 1425, 7626, 191, 820, 2120, 2833, 971, 4429, 931, 3202, # 5462 + 135, 664, 783, 3787, 1997, 772, 2908, 1935, 3951, 3788, 4430, 2909, 3203, 282, 2723, 640, # 5478 + 1372, 3448, 1127, 922, 325, 3344, 7627, 7628, 711, 2044, 7629, 7630, 3952, 2219, 2787, 1936, # 5494 + 3953, 3345, 2220, 2251, 3789, 2300, 7631, 4431, 3790, 1258, 3279, 3954, 3204, 2138, 2950, 3955, # 5510 + 3956, 7632, 2221, 258, 3205, 4432, 101, 1227, 7633, 3280, 1755, 7634, 1391, 3281, 7635, 2910, # 5526 + 2056, 893, 7636, 7637, 7638, 1402, 4161, 2342, 7639, 7640, 3206, 3556, 7641, 7642, 878, 1325, # 5542 + 1780, 2788, 4433, 259, 1385, 2577, 744, 1183, 2267, 4434, 7643, 3957, 2502, 7644, 684, 1024, # 5558 + 4162, 7645, 472, 3557, 3449, 1165, 3282, 3958, 3959, 322, 2152, 881, 455, 1695, 1152, 1340, # 5574 + 660, 554, 2153, 4435, 1058, 4436, 4163, 830, 1065, 3346, 3960, 4437, 1923, 7646, 1703, 1918, # 5590 + 7647, 932, 2268, 122, 7648, 4438, 947, 677, 7649, 3791, 2627, 297, 1905, 1924, 2269, 4439, # 5606 + 2317, 3283, 7650, 7651, 4164, 7652, 4165, 84, 4166, 112, 989, 7653, 547, 1059, 3961, 701, # 5622 + 3558, 1019, 7654, 4167, 7655, 3450, 942, 639, 457, 2301, 2451, 993, 2951, 407, 851, 494, # 5638 + 4440, 3347, 927, 7656, 1237, 7657, 2421, 3348, 573, 4168, 680, 921, 2911, 1279, 1874, 285, # 5654 + 790, 1448, 1983, 719, 2167, 7658, 7659, 4441, 3962, 3963, 1649, 7660, 1541, 563, 7661, 1077, # 5670 + 7662, 3349, 3041, 3451, 511, 2997, 3964, 3965, 3667, 3966, 1268, 2564, 3350, 3207, 4442, 4443, # 5686 + 7663, 535, 1048, 1276, 1189, 2912, 2028, 3142, 1438, 1373, 2834, 2952, 1134, 2012, 7664, 4169, # 5702 + 1238, 2578, 3086, 1259, 7665, 700, 7666, 2953, 3143, 3668, 4170, 7667, 4171, 1146, 1875, 1906, # 5718 + 4444, 2601, 3967, 781, 2422, 132, 1589, 203, 147, 273, 2789, 2402, 898, 1786, 2154, 3968, # 5734 + 3969, 7668, 3792, 2790, 7669, 7670, 4445, 4446, 7671, 3208, 7672, 1635, 3793, 965, 7673, 1804, # 5750 + 2690, 1516, 3559, 1121, 1082, 1329, 3284, 3970, 1449, 3794, 65, 1128, 2835, 2913, 2759, 1590, # 5766 + 3795, 7674, 7675, 12, 2658, 45, 976, 2579, 3144, 4447, 517, 2528, 1013, 1037, 3209, 7676, # 5782 + 3796, 2836, 7677, 3797, 7678, 3452, 7679, 2602, 614, 1998, 2318, 3798, 3087, 2724, 2628, 7680, # 5798 + 2580, 4172, 599, 1269, 7681, 1810, 3669, 7682, 2691, 3088, 759, 1060, 489, 1805, 3351, 3285, # 5814 + 1358, 7683, 7684, 2386, 1387, 1215, 2629, 2252, 490, 7685, 7686, 4173, 1759, 2387, 2343, 7687, # 5830 + 4448, 3799, 1907, 3971, 2630, 1806, 3210, 4449, 3453, 3286, 2760, 2344, 874, 7688, 7689, 3454, # 5846 + 3670, 1858, 91, 2914, 3671, 3042, 3800, 4450, 7690, 3145, 3972, 2659, 7691, 3455, 1202, 1403, # 5862 + 3801, 2954, 2529, 1517, 2503, 4451, 3456, 2504, 7692, 4452, 7693, 2692, 1885, 1495, 1731, 3973, # 5878 + 2365, 4453, 7694, 2029, 7695, 7696, 3974, 2693, 1216, 237, 2581, 4174, 2319, 3975, 3802, 4454, # 5894 + 4455, 2694, 3560, 3457, 445, 4456, 7697, 7698, 7699, 7700, 2761, 61, 3976, 3672, 1822, 3977, # 5910 + 7701, 687, 2045, 935, 925, 405, 2660, 703, 1096, 1859, 2725, 4457, 3978, 1876, 1367, 2695, # 5926 + 3352, 918, 2105, 1781, 2476, 334, 3287, 1611, 1093, 4458, 564, 3146, 3458, 3673, 3353, 945, # 5942 + 2631, 2057, 4459, 7702, 1925, 872, 4175, 7703, 3459, 2696, 3089, 349, 4176, 3674, 3979, 4460, # 5958 + 3803, 4177, 3675, 2155, 3980, 4461, 4462, 4178, 4463, 2403, 2046, 782, 3981, 400, 251, 4179, # 5974 + 1624, 7704, 7705, 277, 3676, 299, 
1265, 476, 1191, 3804, 2121, 4180, 4181, 1109, 205, 7706, # 5990 + 2582, 1000, 2156, 3561, 1860, 7707, 7708, 7709, 4464, 7710, 4465, 2565, 107, 2477, 2157, 3982, # 6006 + 3460, 3147, 7711, 1533, 541, 1301, 158, 753, 4182, 2872, 3562, 7712, 1696, 370, 1088, 4183, # 6022 + 4466, 3563, 579, 327, 440, 162, 2240, 269, 1937, 1374, 3461, 968, 3043, 56, 1396, 3090, # 6038 + 2106, 3288, 3354, 7713, 1926, 2158, 4467, 2998, 7714, 3564, 7715, 7716, 3677, 4468, 2478, 7717, # 6054 + 2791, 7718, 1650, 4469, 7719, 2603, 7720, 7721, 3983, 2661, 3355, 1149, 3356, 3984, 3805, 3985, # 6070 + 7722, 1076, 49, 7723, 951, 3211, 3289, 3290, 450, 2837, 920, 7724, 1811, 2792, 2366, 4184, # 6086 + 1908, 1138, 2367, 3806, 3462, 7725, 3212, 4470, 1909, 1147, 1518, 2423, 4471, 3807, 7726, 4472, # 6102 + 2388, 2604, 260, 1795, 3213, 7727, 7728, 3808, 3291, 708, 7729, 3565, 1704, 7730, 3566, 1351, # 6118 + 1618, 3357, 2999, 1886, 944, 4185, 3358, 4186, 3044, 3359, 4187, 7731, 3678, 422, 413, 1714, # 6134 + 3292, 500, 2058, 2345, 4188, 2479, 7732, 1344, 1910, 954, 7733, 1668, 7734, 7735, 3986, 2404, # 6150 + 4189, 3567, 3809, 4190, 7736, 2302, 1318, 2505, 3091, 133, 3092, 2873, 4473, 629, 31, 2838, # 6166 + 2697, 3810, 4474, 850, 949, 4475, 3987, 2955, 1732, 2088, 4191, 1496, 1852, 7737, 3988, 620, # 6182 + 3214, 981, 1242, 3679, 3360, 1619, 3680, 1643, 3293, 2139, 2452, 1970, 1719, 3463, 2168, 7738, # 6198 + 3215, 7739, 7740, 3361, 1828, 7741, 1277, 4476, 1565, 2047, 7742, 1636, 3568, 3093, 7743, 869, # 6214 + 2839, 655, 3811, 3812, 3094, 3989, 3000, 3813, 1310, 3569, 4477, 7744, 7745, 7746, 1733, 558, # 6230 + 4478, 3681, 335, 1549, 3045, 1756, 4192, 3682, 1945, 3464, 1829, 1291, 1192, 470, 2726, 2107, # 6246 + 2793, 913, 1054, 3990, 7747, 1027, 7748, 3046, 3991, 4479, 982, 2662, 3362, 3148, 3465, 3216, # 6262 + 3217, 1946, 2794, 7749, 571, 4480, 7750, 1830, 7751, 3570, 2583, 1523, 2424, 7752, 2089, 984, # 6278 + 4481, 3683, 1959, 7753, 3684, 852, 923, 2795, 3466, 3685, 969, 1519, 999, 2048, 2320, 1705, # 6294 + 7754, 3095, 615, 1662, 151, 597, 3992, 2405, 2321, 1049, 275, 4482, 3686, 4193, 568, 3687, # 6310 + 3571, 2480, 4194, 3688, 7755, 2425, 2270, 409, 3218, 7756, 1566, 2874, 3467, 1002, 769, 2840, # 6326 + 194, 2090, 3149, 3689, 2222, 3294, 4195, 628, 1505, 7757, 7758, 1763, 2177, 3001, 3993, 521, # 6342 + 1161, 2584, 1787, 2203, 2406, 4483, 3994, 1625, 4196, 4197, 412, 42, 3096, 464, 7759, 2632, # 6358 + 4484, 3363, 1760, 1571, 2875, 3468, 2530, 1219, 2204, 3814, 2633, 2140, 2368, 4485, 4486, 3295, # 6374 + 1651, 3364, 3572, 7760, 7761, 3573, 2481, 3469, 7762, 3690, 7763, 7764, 2271, 2091, 460, 7765, # 6390 + 4487, 7766, 3002, 962, 588, 3574, 289, 3219, 2634, 1116, 52, 7767, 3047, 1796, 7768, 7769, # 6406 + 7770, 1467, 7771, 1598, 1143, 3691, 4198, 1984, 1734, 1067, 4488, 1280, 3365, 465, 4489, 1572, # 6422 + 510, 7772, 1927, 2241, 1812, 1644, 3575, 7773, 4490, 3692, 7774, 7775, 2663, 1573, 1534, 7776, # 6438 + 7777, 4199, 536, 1807, 1761, 3470, 3815, 3150, 2635, 7778, 7779, 7780, 4491, 3471, 2915, 1911, # 6454 + 2796, 7781, 3296, 1122, 377, 3220, 7782, 360, 7783, 7784, 4200, 1529, 551, 7785, 2059, 3693, # 6470 + 1769, 2426, 7786, 2916, 4201, 3297, 3097, 2322, 2108, 2030, 4492, 1404, 136, 1468, 1479, 672, # 6486 + 1171, 3221, 2303, 271, 3151, 7787, 2762, 7788, 2049, 678, 2727, 865, 1947, 4493, 7789, 2013, # 6502 + 3995, 2956, 7790, 2728, 2223, 1397, 3048, 3694, 4494, 4495, 1735, 2917, 3366, 3576, 7791, 3816, # 6518 + 509, 2841, 2453, 2876, 3817, 7792, 7793, 3152, 3153, 4496, 4202, 2531, 4497, 2304, 1166, 1010, # 
6534 + 552, 681, 1887, 7794, 7795, 2957, 2958, 3996, 1287, 1596, 1861, 3154, 358, 453, 736, 175, # 6550 + 478, 1117, 905, 1167, 1097, 7796, 1853, 1530, 7797, 1706, 7798, 2178, 3472, 2287, 3695, 3473, # 6566 + 3577, 4203, 2092, 4204, 7799, 3367, 1193, 2482, 4205, 1458, 2190, 2205, 1862, 1888, 1421, 3298, # 6582 + 2918, 3049, 2179, 3474, 595, 2122, 7800, 3997, 7801, 7802, 4206, 1707, 2636, 223, 3696, 1359, # 6598 + 751, 3098, 183, 3475, 7803, 2797, 3003, 419, 2369, 633, 704, 3818, 2389, 241, 7804, 7805, # 6614 + 7806, 838, 3004, 3697, 2272, 2763, 2454, 3819, 1938, 2050, 3998, 1309, 3099, 2242, 1181, 7807, # 6630 + 1136, 2206, 3820, 2370, 1446, 4207, 2305, 4498, 7808, 7809, 4208, 1055, 2605, 484, 3698, 7810, # 6646 + 3999, 625, 4209, 2273, 3368, 1499, 4210, 4000, 7811, 4001, 4211, 3222, 2274, 2275, 3476, 7812, # 6662 + 7813, 2764, 808, 2606, 3699, 3369, 4002, 4212, 3100, 2532, 526, 3370, 3821, 4213, 955, 7814, # 6678 + 1620, 4214, 2637, 2427, 7815, 1429, 3700, 1669, 1831, 994, 928, 7816, 3578, 1260, 7817, 7818, # 6694 + 7819, 1948, 2288, 741, 2919, 1626, 4215, 2729, 2455, 867, 1184, 362, 3371, 1392, 7820, 7821, # 6710 + 4003, 4216, 1770, 1736, 3223, 2920, 4499, 4500, 1928, 2698, 1459, 1158, 7822, 3050, 3372, 2877, # 6726 + 1292, 1929, 2506, 2842, 3701, 1985, 1187, 2071, 2014, 2607, 4217, 7823, 2566, 2507, 2169, 3702, # 6742 + 2483, 3299, 7824, 3703, 4501, 7825, 7826, 666, 1003, 3005, 1022, 3579, 4218, 7827, 4502, 1813, # 6758 + 2253, 574, 3822, 1603, 295, 1535, 705, 3823, 4219, 283, 858, 417, 7828, 7829, 3224, 4503, # 6774 + 4504, 3051, 1220, 1889, 1046, 2276, 2456, 4004, 1393, 1599, 689, 2567, 388, 4220, 7830, 2484, # 6790 + 802, 7831, 2798, 3824, 2060, 1405, 2254, 7832, 4505, 3825, 2109, 1052, 1345, 3225, 1585, 7833, # 6806 + 809, 7834, 7835, 7836, 575, 2730, 3477, 956, 1552, 1469, 1144, 2323, 7837, 2324, 1560, 2457, # 6822 + 3580, 3226, 4005, 616, 2207, 3155, 2180, 2289, 7838, 1832, 7839, 3478, 4506, 7840, 1319, 3704, # 6838 + 3705, 1211, 3581, 1023, 3227, 1293, 2799, 7841, 7842, 7843, 3826, 607, 2306, 3827, 762, 2878, # 6854 + 1439, 4221, 1360, 7844, 1485, 3052, 7845, 4507, 1038, 4222, 1450, 2061, 2638, 4223, 1379, 4508, # 6870 + 2585, 7846, 7847, 4224, 1352, 1414, 2325, 2921, 1172, 7848, 7849, 3828, 3829, 7850, 1797, 1451, # 6886 + 7851, 7852, 7853, 7854, 2922, 4006, 4007, 2485, 2346, 411, 4008, 4009, 3582, 3300, 3101, 4509, # 6902 + 1561, 2664, 1452, 4010, 1375, 7855, 7856, 47, 2959, 316, 7857, 1406, 1591, 2923, 3156, 7858, # 6918 + 1025, 2141, 3102, 3157, 354, 2731, 884, 2224, 4225, 2407, 508, 3706, 726, 3583, 996, 2428, # 6934 + 3584, 729, 7859, 392, 2191, 1453, 4011, 4510, 3707, 7860, 7861, 2458, 3585, 2608, 1675, 2800, # 6950 + 919, 2347, 2960, 2348, 1270, 4511, 4012, 73, 7862, 7863, 647, 7864, 3228, 2843, 2255, 1550, # 6966 + 1346, 3006, 7865, 1332, 883, 3479, 7866, 7867, 7868, 7869, 3301, 2765, 7870, 1212, 831, 1347, # 6982 + 4226, 4512, 2326, 3830, 1863, 3053, 720, 3831, 4513, 4514, 3832, 7871, 4227, 7872, 7873, 4515, # 6998 + 7874, 7875, 1798, 4516, 3708, 2609, 4517, 3586, 1645, 2371, 7876, 7877, 2924, 669, 2208, 2665, # 7014 + 2429, 7878, 2879, 7879, 7880, 1028, 3229, 7881, 4228, 2408, 7882, 2256, 1353, 7883, 7884, 4518, # 7030 + 3158, 518, 7885, 4013, 7886, 4229, 1960, 7887, 2142, 4230, 7888, 7889, 3007, 2349, 2350, 3833, # 7046 + 516, 1833, 1454, 4014, 2699, 4231, 4519, 2225, 2610, 1971, 1129, 3587, 7890, 2766, 7891, 2961, # 7062 + 1422, 577, 1470, 3008, 1524, 3373, 7892, 7893, 432, 4232, 3054, 3480, 7894, 2586, 1455, 2508, # 7078 + 2226, 1972, 1175, 7895, 1020, 2732, 4015, 
3481, 4520, 7896, 2733, 7897, 1743, 1361, 3055, 3482, # 7094 + 2639, 4016, 4233, 4521, 2290, 895, 924, 4234, 2170, 331, 2243, 3056, 166, 1627, 3057, 1098, # 7110 + 7898, 1232, 2880, 2227, 3374, 4522, 657, 403, 1196, 2372, 542, 3709, 3375, 1600, 4235, 3483, # 7126 + 7899, 4523, 2767, 3230, 576, 530, 1362, 7900, 4524, 2533, 2666, 3710, 4017, 7901, 842, 3834, # 7142 + 7902, 2801, 2031, 1014, 4018, 213, 2700, 3376, 665, 621, 4236, 7903, 3711, 2925, 2430, 7904, # 7158 + 2431, 3302, 3588, 3377, 7905, 4237, 2534, 4238, 4525, 3589, 1682, 4239, 3484, 1380, 7906, 724, # 7174 + 2277, 600, 1670, 7907, 1337, 1233, 4526, 3103, 2244, 7908, 1621, 4527, 7909, 651, 4240, 7910, # 7190 + 1612, 4241, 2611, 7911, 2844, 7912, 2734, 2307, 3058, 7913, 716, 2459, 3059, 174, 1255, 2701, # 7206 + 4019, 3590, 548, 1320, 1398, 728, 4020, 1574, 7914, 1890, 1197, 3060, 4021, 7915, 3061, 3062, # 7222 + 3712, 3591, 3713, 747, 7916, 635, 4242, 4528, 7917, 7918, 7919, 4243, 7920, 7921, 4529, 7922, # 7238 + 3378, 4530, 2432, 451, 7923, 3714, 2535, 2072, 4244, 2735, 4245, 4022, 7924, 1764, 4531, 7925, # 7254 + 4246, 350, 7926, 2278, 2390, 2486, 7927, 4247, 4023, 2245, 1434, 4024, 488, 4532, 458, 4248, # 7270 + 4025, 3715, 771, 1330, 2391, 3835, 2568, 3159, 2159, 2409, 1553, 2667, 3160, 4249, 7928, 2487, # 7286 + 2881, 2612, 1720, 2702, 4250, 3379, 4533, 7929, 2536, 4251, 7930, 3231, 4252, 2768, 7931, 2015, # 7302 + 2736, 7932, 1155, 1017, 3716, 3836, 7933, 3303, 2308, 201, 1864, 4253, 1430, 7934, 4026, 7935, # 7318 + 7936, 7937, 7938, 7939, 4254, 1604, 7940, 414, 1865, 371, 2587, 4534, 4535, 3485, 2016, 3104, # 7334 + 4536, 1708, 960, 4255, 887, 389, 2171, 1536, 1663, 1721, 7941, 2228, 4027, 2351, 2926, 1580, # 7350 + 7942, 7943, 7944, 1744, 7945, 2537, 4537, 4538, 7946, 4539, 7947, 2073, 7948, 7949, 3592, 3380, # 7366 + 2882, 4256, 7950, 4257, 2640, 3381, 2802, 673, 2703, 2460, 709, 3486, 4028, 3593, 4258, 7951, # 7382 + 1148, 502, 634, 7952, 7953, 1204, 4540, 3594, 1575, 4541, 2613, 3717, 7954, 3718, 3105, 948, # 7398 + 3232, 121, 1745, 3837, 1110, 7955, 4259, 3063, 2509, 3009, 4029, 3719, 1151, 1771, 3838, 1488, # 7414 + 4030, 1986, 7956, 2433, 3487, 7957, 7958, 2093, 7959, 4260, 3839, 1213, 1407, 2803, 531, 2737, # 7430 + 2538, 3233, 1011, 1537, 7960, 2769, 4261, 3106, 1061, 7961, 3720, 3721, 1866, 2883, 7962, 2017, # 7446 + 120, 4262, 4263, 2062, 3595, 3234, 2309, 3840, 2668, 3382, 1954, 4542, 7963, 7964, 3488, 1047, # 7462 + 2704, 1266, 7965, 1368, 4543, 2845, 649, 3383, 3841, 2539, 2738, 1102, 2846, 2669, 7966, 7967, # 7478 + 1999, 7968, 1111, 3596, 2962, 7969, 2488, 3842, 3597, 2804, 1854, 3384, 3722, 7970, 7971, 3385, # 7494 + 2410, 2884, 3304, 3235, 3598, 7972, 2569, 7973, 3599, 2805, 4031, 1460, 856, 7974, 3600, 7975, # 7510 + 2885, 2963, 7976, 2886, 3843, 7977, 4264, 632, 2510, 875, 3844, 1697, 3845, 2291, 7978, 7979, # 7526 + 4544, 3010, 1239, 580, 4545, 4265, 7980, 914, 936, 2074, 1190, 4032, 1039, 2123, 7981, 7982, # 7542 + 7983, 3386, 1473, 7984, 1354, 4266, 3846, 7985, 2172, 3064, 4033, 915, 3305, 4267, 4268, 3306, # 7558 + 1605, 1834, 7986, 2739, 398, 3601, 4269, 3847, 4034, 328, 1912, 2847, 4035, 3848, 1331, 4270, # 7574 + 3011, 937, 4271, 7987, 3602, 4036, 4037, 3387, 2160, 4546, 3388, 524, 742, 538, 3065, 1012, # 7590 + 7988, 7989, 3849, 2461, 7990, 658, 1103, 225, 3850, 7991, 7992, 4547, 7993, 4548, 7994, 3236, # 7606 + 1243, 7995, 4038, 963, 2246, 4549, 7996, 2705, 3603, 3161, 7997, 7998, 2588, 2327, 7999, 4550, # 7622 + 8000, 8001, 8002, 3489, 3307, 957, 3389, 2540, 2032, 1930, 2927, 2462, 870, 2018, 
3604, 1746, # 7638 + 2770, 2771, 2434, 2463, 8003, 3851, 8004, 3723, 3107, 3724, 3490, 3390, 3725, 8005, 1179, 3066, # 7654 + 8006, 3162, 2373, 4272, 3726, 2541, 3163, 3108, 2740, 4039, 8007, 3391, 1556, 2542, 2292, 977, # 7670 + 2887, 2033, 4040, 1205, 3392, 8008, 1765, 3393, 3164, 2124, 1271, 1689, 714, 4551, 3491, 8009, # 7686 + 2328, 3852, 533, 4273, 3605, 2181, 617, 8010, 2464, 3308, 3492, 2310, 8011, 8012, 3165, 8013, # 7702 + 8014, 3853, 1987, 618, 427, 2641, 3493, 3394, 8015, 8016, 1244, 1690, 8017, 2806, 4274, 4552, # 7718 + 8018, 3494, 8019, 8020, 2279, 1576, 473, 3606, 4275, 3395, 972, 8021, 3607, 8022, 3067, 8023, # 7734 + 8024, 4553, 4554, 8025, 3727, 4041, 4042, 8026, 153, 4555, 356, 8027, 1891, 2888, 4276, 2143, # 7750 + 408, 803, 2352, 8028, 3854, 8029, 4277, 1646, 2570, 2511, 4556, 4557, 3855, 8030, 3856, 4278, # 7766 + 8031, 2411, 3396, 752, 8032, 8033, 1961, 2964, 8034, 746, 3012, 2465, 8035, 4279, 3728, 698, # 7782 + 4558, 1892, 4280, 3608, 2543, 4559, 3609, 3857, 8036, 3166, 3397, 8037, 1823, 1302, 4043, 2706, # 7798 + 3858, 1973, 4281, 8038, 4282, 3167, 823, 1303, 1288, 1236, 2848, 3495, 4044, 3398, 774, 3859, # 7814 + 8039, 1581, 4560, 1304, 2849, 3860, 4561, 8040, 2435, 2161, 1083, 3237, 4283, 4045, 4284, 344, # 7830 + 1173, 288, 2311, 454, 1683, 8041, 8042, 1461, 4562, 4046, 2589, 8043, 8044, 4563, 985, 894, # 7846 + 8045, 3399, 3168, 8046, 1913, 2928, 3729, 1988, 8047, 2110, 1974, 8048, 4047, 8049, 2571, 1194, # 7862 + 425, 8050, 4564, 3169, 1245, 3730, 4285, 8051, 8052, 2850, 8053, 636, 4565, 1855, 3861, 760, # 7878 + 1799, 8054, 4286, 2209, 1508, 4566, 4048, 1893, 1684, 2293, 8055, 8056, 8057, 4287, 4288, 2210, # 7894 + 479, 8058, 8059, 832, 8060, 4049, 2489, 8061, 2965, 2490, 3731, 990, 3109, 627, 1814, 2642, # 7910 + 4289, 1582, 4290, 2125, 2111, 3496, 4567, 8062, 799, 4291, 3170, 8063, 4568, 2112, 1737, 3013, # 7926 + 1018, 543, 754, 4292, 3309, 1676, 4569, 4570, 4050, 8064, 1489, 8065, 3497, 8066, 2614, 2889, # 7942 + 4051, 8067, 8068, 2966, 8069, 8070, 8071, 8072, 3171, 4571, 4572, 2182, 1722, 8073, 3238, 3239, # 7958 + 1842, 3610, 1715, 481, 365, 1975, 1856, 8074, 8075, 1962, 2491, 4573, 8076, 2126, 3611, 3240, # 7974 + 433, 1894, 2063, 2075, 8077, 602, 2741, 8078, 8079, 8080, 8081, 8082, 3014, 1628, 3400, 8083, # 7990 + 3172, 4574, 4052, 2890, 4575, 2512, 8084, 2544, 2772, 8085, 8086, 8087, 3310, 4576, 2891, 8088, # 8006 + 4577, 8089, 2851, 4578, 4579, 1221, 2967, 4053, 2513, 8090, 8091, 8092, 1867, 1989, 8093, 8094, # 8022 + 8095, 1895, 8096, 8097, 4580, 1896, 4054, 318, 8098, 2094, 4055, 4293, 8099, 8100, 485, 8101, # 8038 + 938, 3862, 553, 2670, 116, 8102, 3863, 3612, 8103, 3498, 2671, 2773, 3401, 3311, 2807, 8104, # 8054 + 3613, 2929, 4056, 1747, 2930, 2968, 8105, 8106, 207, 8107, 8108, 2672, 4581, 2514, 8109, 3015, # 8070 + 890, 3614, 3864, 8110, 1877, 3732, 3402, 8111, 2183, 2353, 3403, 1652, 8112, 8113, 8114, 941, # 8086 + 2294, 208, 3499, 4057, 2019, 330, 4294, 3865, 2892, 2492, 3733, 4295, 8115, 8116, 8117, 8118, # 8102 +) +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py new file mode 100644 index 0000000..a37ab18 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/euctwprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCTWDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import EUCTW_SM_MODEL + + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "EUC-TW" + + @property + def language(self) -> str: + return "Taiwan" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py new file mode 100644 index 0000000..b32bfc7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312freq.py @@ -0,0 +1,284 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +# fmt: off +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 
948,4938,2291, 110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 
290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 
326,2140,2577, 892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 
866,4447,1118, 63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py new file mode 100644 index 0000000..d423e73 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/gb2312prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import GB2312DistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import GB2312_SM_MODEL + + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "GB2312" + + @property + def language(self) -> str: + return "Chinese" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py new file mode 100644 index 0000000..785d005 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/hebrewprober.py @@ -0,0 +1,316 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Optional, Union + +from .charsetprober import CharSetProber +from .enums import ProbingState +from .sbcharsetprober import SingleByteCharSetProber + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. 
Meaning that for the
+# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like this:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (in fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+#    specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+#    that contains special punctuation marks or diacritics is displayed with
+#    some unconverted characters showing as question marks. This problem might
+#    be corrected using another model prober for x-mac-hebrew. Because
+#    x-mac-hebrew texts are so rare, writing another model prober isn't
+#    worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, queried and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one it is is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally, as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
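+#
+# [Editorial sketch -- not part of the upstream chardet source.] The
+# "backwards" pair lookup can be pictured like this: where the logical
+# prober rates each bigram (prev, cur) against the windows-1255 model,
+# the visual prober rates (cur, prev), so the very same model ends up
+# scoring the mirrored text. In pseudocode (names illustrative only):
+#
+#     logical_score += model[prev_order][cur_order]   # normal lookup
+#     visual_score  += model[cur_order][prev_order]   # reversed lookup
+#
+# In this vendored chardet the reversal is requested when SBCSGroupProber
+# constructs the two Hebrew SingleByteCharSetProbers.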
+#
+# The HebrewProber is not using any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. Detached from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the final-letter scores it maintains and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
+
+
+class HebrewProber(CharSetProber):
+    SPACE = 0x20
+    # windows-1255 / ISO-8859-8 code points of interest
+    FINAL_KAF = 0xEA
+    NORMAL_KAF = 0xEB
+    FINAL_MEM = 0xED
+    NORMAL_MEM = 0xEE
+    FINAL_NUN = 0xEF
+    NORMAL_NUN = 0xF0
+    FINAL_PE = 0xF3
+    NORMAL_PE = 0xF4
+    FINAL_TSADI = 0xF5
+    NORMAL_TSADI = 0xF6
+
+    # Minimum Visual vs Logical final letter score difference.
+    # If the difference is below this, don't rely solely on the final letter
+    # score distance.
+    MIN_FINAL_CHAR_DISTANCE = 5
+
+    # Minimum Visual vs Logical model score difference.
+    # If the difference is below this, don't rely at all on the model score
+    # distance.
+    MIN_MODEL_DISTANCE = 0.01
+
+    VISUAL_HEBREW_NAME = "ISO-8859-8"
+    LOGICAL_HEBREW_NAME = "windows-1255"
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._final_char_logical_score = 0
+        self._final_char_visual_score = 0
+        self._prev = self.SPACE
+        self._before_prev = self.SPACE
+        self._logical_prober: Optional[SingleByteCharSetProber] = None
+        self._visual_prober: Optional[SingleByteCharSetProber] = None
+        self.reset()
+
+    def reset(self) -> None:
+        self._final_char_logical_score = 0
+        self._final_char_visual_score = 0
+        # The last two characters seen in the previous buffer;
+        # self._prev and self._before_prev are initialized to space in order
+        # to simulate a word delimiter at the beginning of the data.
+        self._prev = self.SPACE
+        self._before_prev = self.SPACE
+        # These probers are owned by the group prober.
+
+    def set_model_probers(
+        self,
+        logical_prober: SingleByteCharSetProber,
+        visual_prober: SingleByteCharSetProber,
+    ) -> None:
+        self._logical_prober = logical_prober
+        self._visual_prober = visual_prober
+
+    def is_final(self, c: int) -> bool:
+        return c in [
+            self.FINAL_KAF,
+            self.FINAL_MEM,
+            self.FINAL_NUN,
+            self.FINAL_PE,
+            self.FINAL_TSADI,
+        ]
+
+    def is_non_final(self, c: int) -> bool:
+        # The normal Tsadi is not a good Non-Final letter because of words
+        # like 'lechotet' (to chat), which contain an apostrophe after the
+        # tsadi. This apostrophe is converted to a space when the group prober
+        # filters out English letters, causing the Non-Final tsadi to appear
+        # at the end of a word even though this is not the case in the
+        # original text.
+        # The letters Pe and Kaf occasionally show the same problem: words
+        # like 'Pop', 'Winamp' and 'Mubarak', for example, legitimately end
+        # with a Non-Final Pe or Kaf. However, the benefit of keeping these
+        # letters as Non-Final letters outweighs the damage, since such words
+        # are quite rare.
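+        # [Editorial note, not upstream:] hence NORMAL_TSADI is deliberately
+        # absent from the list below, while NORMAL_PE and NORMAL_KAF are kept
+        # despite the loan-word caveat above.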
+        return c in [self.NORMAL_KAF, self.NORMAL_MEM, self.NORMAL_NUN, self.NORMAL_PE]
+
+    def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState:
+        # Final letter analysis for logical-visual decision.
+        # Look for evidence that the received buffer is either logical Hebrew
+        # or visual Hebrew.
+        # The following cases are checked:
+        # 1) A word longer than 1 letter, ending with a final letter. This is
+        #    an indication that the text is laid out "naturally" since the
+        #    final letter really appears at the end. +1 for logical score.
+        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi
+        #    should not end with the Non-Final form of that letter. Exceptions
+        #    to this rule are mentioned above in is_non_final(). This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score.
+        # 3) A word longer than 1 letter, starting with a final letter. Final
+        #    letters should not appear at the beginning of a word. This is an
+        #    indication that the text is laid out backwards. +1 for visual
+        #    score.
+        #
+        # The visual score and logical score are accumulated throughout the
+        # text and are finally checked against each other in the charset_name
+        # property.
+        # No checking for final letters in the middle of words is done, since
+        # that case is not an indication of either Logical or Visual text.
+        #
+        # We automatically filter out all 7-bit characters (replace them with
+        # spaces) so the word boundary detection works properly. [MAP]
+
+        if self.state == ProbingState.NOT_ME:
+            # Both model probers say it's not them. No reason to continue.
+            return ProbingState.NOT_ME
+
+        byte_str = self.filter_high_byte_only(byte_str)
+
+        for cur in byte_str:
+            if cur == self.SPACE:
+                # We stand on a space - a word just ended
+                if self._before_prev != self.SPACE:
+                    # next-to-last char was not a space so self._prev is not a
+                    # 1 letter word
+                    if self.is_final(self._prev):
+                        # case (1) [-2:not space][-1:final letter][cur:space]
+                        self._final_char_logical_score += 1
+                    elif self.is_non_final(self._prev):
+                        # case (2) [-2:not space][-1:Non-Final letter][cur:space]
+                        self._final_char_visual_score += 1
+            else:
+                # Not standing on a space
+                if (
+                    (self._before_prev == self.SPACE)
+                    and (self.is_final(self._prev))
+                    and (cur != self.SPACE)
+                ):
+                    # case (3) [-2:space][-1:final letter][cur:not space]
+                    self._final_char_visual_score += 1
+            self._before_prev = self._prev
+            self._prev = cur
+
+        # Keep detecting until the end of the data, or until both model
+        # probers return ProbingState.NOT_ME (handled above).
+        return ProbingState.DETECTING
+
+    @property
+    def charset_name(self) -> str:
+        assert self._logical_prober is not None
+        assert self._visual_prober is not None
+
+        # Make the decision: is it Logical or Visual?
+        # If the final letter score distance is dominant enough, rely on it.
+        finalsub = self._final_char_logical_score - self._final_char_visual_score
+        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
+            return self.LOGICAL_HEBREW_NAME
+        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
+            return self.VISUAL_HEBREW_NAME
+
+        # It's not dominant enough, try to rely on the model scores instead.
+        modelsub = (
+            self._logical_prober.get_confidence() - self._visual_prober.get_confidence()
+        )
+        if modelsub > self.MIN_MODEL_DISTANCE:
+            return self.LOGICAL_HEBREW_NAME
+        if modelsub < -self.MIN_MODEL_DISTANCE:
+            return self.VISUAL_HEBREW_NAME
+
+        # Still no good, back to final letter distance, maybe it'll save the
+        # day.
+        if finalsub < 0.0:
+            return self.VISUAL_HEBREW_NAME
+
+        # (finalsub > 0 means Logical; if we still can't tell, default to
+        # Logical as well.)
+        return self.LOGICAL_HEBREW_NAME
+
+    @property
+    def language(self) -> str:
+        return "Hebrew"
+
+    @property
+    def state(self) -> ProbingState:
+        assert self._logical_prober is not None
+        assert self._visual_prober is not None
+
+        # Remain active as long as either of the model probers is active.
+        if (self._logical_prober.state == ProbingState.NOT_ME) and (
+            self._visual_prober.state == ProbingState.NOT_ME
+        ):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py
new file mode 100644
index 0000000..3293576
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and
+# computer technology.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP.
+# Characters are listed in descending frequency order.
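+
+# [Editorial note -- not upstream text.] The figures below read "the N most
+# frequent characters cover this fraction of the sampled text"; e.g. the 512
+# most frequent characters cover about 92.6% of the corpus. The ideal
+# distribution ratio then compares the frequency mass inside vs. outside
+# that 512-character window:
+#
+#     0.92635 / (1 - 0.92635) ~= 12.58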
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +# fmt: off +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 +5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 +5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 
1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, 
# 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, 
# 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 
704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py new file mode 100644 index 0000000..c129699 --- /dev/null +++ 
b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabfreq.py @@ -0,0 +1,2382 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# The frequency data itself is the same as euc-kr. +# This is just a mapping table to euc-kr. + +JOHAB_TO_EUCKR_ORDER_TABLE = { + 0x8861: 0, + 0x8862: 1, + 0x8865: 2, + 0x8868: 3, + 0x8869: 4, + 0x886A: 5, + 0x886B: 6, + 0x8871: 7, + 0x8873: 8, + 0x8874: 9, + 0x8875: 10, + 0x8876: 11, + 0x8877: 12, + 0x8878: 13, + 0x8879: 14, + 0x887B: 15, + 0x887C: 16, + 0x887D: 17, + 0x8881: 18, + 0x8882: 19, + 0x8885: 20, + 0x8889: 21, + 0x8891: 22, + 0x8893: 23, + 0x8895: 24, + 0x8896: 25, + 0x8897: 26, + 0x88A1: 27, + 0x88A2: 28, + 0x88A5: 29, + 0x88A9: 30, + 0x88B5: 31, + 0x88B7: 32, + 0x88C1: 33, + 0x88C5: 34, + 0x88C9: 35, + 0x88E1: 36, + 0x88E2: 37, + 0x88E5: 38, + 0x88E8: 39, + 0x88E9: 40, + 0x88EB: 41, + 0x88F1: 42, + 0x88F3: 43, + 0x88F5: 44, + 0x88F6: 45, + 0x88F7: 46, + 0x88F8: 47, + 0x88FB: 48, + 0x88FC: 49, + 0x88FD: 50, + 0x8941: 51, + 0x8945: 52, + 0x8949: 53, + 0x8951: 54, + 0x8953: 55, + 0x8955: 56, + 0x8956: 57, + 0x8957: 58, + 0x8961: 59, + 0x8962: 60, + 0x8963: 61, + 0x8965: 62, + 0x8968: 63, + 0x8969: 64, + 0x8971: 65, + 0x8973: 66, + 0x8975: 67, + 0x8976: 68, + 0x8977: 69, + 0x897B: 70, + 0x8981: 71, + 0x8985: 72, + 0x8989: 73, + 0x8993: 74, + 0x8995: 75, + 0x89A1: 76, + 0x89A2: 77, + 0x89A5: 78, + 0x89A8: 79, + 0x89A9: 80, + 0x89AB: 81, + 0x89AD: 82, + 0x89B0: 83, + 0x89B1: 84, + 0x89B3: 85, + 0x89B5: 86, + 0x89B7: 87, + 0x89B8: 88, + 0x89C1: 89, + 0x89C2: 90, + 0x89C5: 91, + 0x89C9: 92, + 0x89CB: 93, + 0x89D1: 94, + 0x89D3: 95, + 0x89D5: 96, + 0x89D7: 97, + 0x89E1: 98, + 0x89E5: 99, + 0x89E9: 100, + 0x89F3: 101, + 0x89F6: 102, + 0x89F7: 103, + 0x8A41: 104, + 0x8A42: 105, + 0x8A45: 106, + 0x8A49: 107, + 0x8A51: 108, + 0x8A53: 109, + 0x8A55: 110, + 0x8A57: 111, + 0x8A61: 112, + 0x8A65: 113, + 0x8A69: 114, + 0x8A73: 115, + 0x8A75: 116, + 0x8A81: 117, + 0x8A82: 118, + 0x8A85: 119, + 0x8A88: 120, + 0x8A89: 121, + 0x8A8A: 122, + 0x8A8B: 123, + 0x8A90: 124, + 0x8A91: 125, + 0x8A93: 126, + 0x8A95: 127, + 0x8A97: 128, + 0x8A98: 129, + 0x8AA1: 130, + 0x8AA2: 131, + 0x8AA5: 132, + 0x8AA9: 133, + 0x8AB6: 134, + 0x8AB7: 135, + 0x8AC1: 136, + 0x8AD5: 137, + 0x8AE1: 138, + 0x8AE2: 139, + 0x8AE5: 140, + 0x8AE9: 141, + 0x8AF1: 142, + 0x8AF3: 143, + 0x8AF5: 144, + 0x8B41: 145, + 0x8B45: 146, + 0x8B49: 147, + 0x8B61: 148, + 
0x8B62: 149, + 0x8B65: 150, + 0x8B68: 151, + 0x8B69: 152, + 0x8B6A: 153, + 0x8B71: 154, + 0x8B73: 155, + 0x8B75: 156, + 0x8B77: 157, + 0x8B81: 158, + 0x8BA1: 159, + 0x8BA2: 160, + 0x8BA5: 161, + 0x8BA8: 162, + 0x8BA9: 163, + 0x8BAB: 164, + 0x8BB1: 165, + 0x8BB3: 166, + 0x8BB5: 167, + 0x8BB7: 168, + 0x8BB8: 169, + 0x8BBC: 170, + 0x8C61: 171, + 0x8C62: 172, + 0x8C63: 173, + 0x8C65: 174, + 0x8C69: 175, + 0x8C6B: 176, + 0x8C71: 177, + 0x8C73: 178, + 0x8C75: 179, + 0x8C76: 180, + 0x8C77: 181, + 0x8C7B: 182, + 0x8C81: 183, + 0x8C82: 184, + 0x8C85: 185, + 0x8C89: 186, + 0x8C91: 187, + 0x8C93: 188, + 0x8C95: 189, + 0x8C96: 190, + 0x8C97: 191, + 0x8CA1: 192, + 0x8CA2: 193, + 0x8CA9: 194, + 0x8CE1: 195, + 0x8CE2: 196, + 0x8CE3: 197, + 0x8CE5: 198, + 0x8CE9: 199, + 0x8CF1: 200, + 0x8CF3: 201, + 0x8CF5: 202, + 0x8CF6: 203, + 0x8CF7: 204, + 0x8D41: 205, + 0x8D42: 206, + 0x8D45: 207, + 0x8D51: 208, + 0x8D55: 209, + 0x8D57: 210, + 0x8D61: 211, + 0x8D65: 212, + 0x8D69: 213, + 0x8D75: 214, + 0x8D76: 215, + 0x8D7B: 216, + 0x8D81: 217, + 0x8DA1: 218, + 0x8DA2: 219, + 0x8DA5: 220, + 0x8DA7: 221, + 0x8DA9: 222, + 0x8DB1: 223, + 0x8DB3: 224, + 0x8DB5: 225, + 0x8DB7: 226, + 0x8DB8: 227, + 0x8DB9: 228, + 0x8DC1: 229, + 0x8DC2: 230, + 0x8DC9: 231, + 0x8DD6: 232, + 0x8DD7: 233, + 0x8DE1: 234, + 0x8DE2: 235, + 0x8DF7: 236, + 0x8E41: 237, + 0x8E45: 238, + 0x8E49: 239, + 0x8E51: 240, + 0x8E53: 241, + 0x8E57: 242, + 0x8E61: 243, + 0x8E81: 244, + 0x8E82: 245, + 0x8E85: 246, + 0x8E89: 247, + 0x8E90: 248, + 0x8E91: 249, + 0x8E93: 250, + 0x8E95: 251, + 0x8E97: 252, + 0x8E98: 253, + 0x8EA1: 254, + 0x8EA9: 255, + 0x8EB6: 256, + 0x8EB7: 257, + 0x8EC1: 258, + 0x8EC2: 259, + 0x8EC5: 260, + 0x8EC9: 261, + 0x8ED1: 262, + 0x8ED3: 263, + 0x8ED6: 264, + 0x8EE1: 265, + 0x8EE5: 266, + 0x8EE9: 267, + 0x8EF1: 268, + 0x8EF3: 269, + 0x8F41: 270, + 0x8F61: 271, + 0x8F62: 272, + 0x8F65: 273, + 0x8F67: 274, + 0x8F69: 275, + 0x8F6B: 276, + 0x8F70: 277, + 0x8F71: 278, + 0x8F73: 279, + 0x8F75: 280, + 0x8F77: 281, + 0x8F7B: 282, + 0x8FA1: 283, + 0x8FA2: 284, + 0x8FA5: 285, + 0x8FA9: 286, + 0x8FB1: 287, + 0x8FB3: 288, + 0x8FB5: 289, + 0x8FB7: 290, + 0x9061: 291, + 0x9062: 292, + 0x9063: 293, + 0x9065: 294, + 0x9068: 295, + 0x9069: 296, + 0x906A: 297, + 0x906B: 298, + 0x9071: 299, + 0x9073: 300, + 0x9075: 301, + 0x9076: 302, + 0x9077: 303, + 0x9078: 304, + 0x9079: 305, + 0x907B: 306, + 0x907D: 307, + 0x9081: 308, + 0x9082: 309, + 0x9085: 310, + 0x9089: 311, + 0x9091: 312, + 0x9093: 313, + 0x9095: 314, + 0x9096: 315, + 0x9097: 316, + 0x90A1: 317, + 0x90A2: 318, + 0x90A5: 319, + 0x90A9: 320, + 0x90B1: 321, + 0x90B7: 322, + 0x90E1: 323, + 0x90E2: 324, + 0x90E4: 325, + 0x90E5: 326, + 0x90E9: 327, + 0x90EB: 328, + 0x90EC: 329, + 0x90F1: 330, + 0x90F3: 331, + 0x90F5: 332, + 0x90F6: 333, + 0x90F7: 334, + 0x90FD: 335, + 0x9141: 336, + 0x9142: 337, + 0x9145: 338, + 0x9149: 339, + 0x9151: 340, + 0x9153: 341, + 0x9155: 342, + 0x9156: 343, + 0x9157: 344, + 0x9161: 345, + 0x9162: 346, + 0x9165: 347, + 0x9169: 348, + 0x9171: 349, + 0x9173: 350, + 0x9176: 351, + 0x9177: 352, + 0x917A: 353, + 0x9181: 354, + 0x9185: 355, + 0x91A1: 356, + 0x91A2: 357, + 0x91A5: 358, + 0x91A9: 359, + 0x91AB: 360, + 0x91B1: 361, + 0x91B3: 362, + 0x91B5: 363, + 0x91B7: 364, + 0x91BC: 365, + 0x91BD: 366, + 0x91C1: 367, + 0x91C5: 368, + 0x91C9: 369, + 0x91D6: 370, + 0x9241: 371, + 0x9245: 372, + 0x9249: 373, + 0x9251: 374, + 0x9253: 375, + 0x9255: 376, + 0x9261: 377, + 0x9262: 378, + 0x9265: 379, + 0x9269: 380, + 0x9273: 381, + 0x9275: 382, + 0x9277: 383, + 0x9281: 384, + 0x9282: 385, + 
0x9285: 386, + 0x9288: 387, + 0x9289: 388, + 0x9291: 389, + 0x9293: 390, + 0x9295: 391, + 0x9297: 392, + 0x92A1: 393, + 0x92B6: 394, + 0x92C1: 395, + 0x92E1: 396, + 0x92E5: 397, + 0x92E9: 398, + 0x92F1: 399, + 0x92F3: 400, + 0x9341: 401, + 0x9342: 402, + 0x9349: 403, + 0x9351: 404, + 0x9353: 405, + 0x9357: 406, + 0x9361: 407, + 0x9362: 408, + 0x9365: 409, + 0x9369: 410, + 0x936A: 411, + 0x936B: 412, + 0x9371: 413, + 0x9373: 414, + 0x9375: 415, + 0x9377: 416, + 0x9378: 417, + 0x937C: 418, + 0x9381: 419, + 0x9385: 420, + 0x9389: 421, + 0x93A1: 422, + 0x93A2: 423, + 0x93A5: 424, + 0x93A9: 425, + 0x93AB: 426, + 0x93B1: 427, + 0x93B3: 428, + 0x93B5: 429, + 0x93B7: 430, + 0x93BC: 431, + 0x9461: 432, + 0x9462: 433, + 0x9463: 434, + 0x9465: 435, + 0x9468: 436, + 0x9469: 437, + 0x946A: 438, + 0x946B: 439, + 0x946C: 440, + 0x9470: 441, + 0x9471: 442, + 0x9473: 443, + 0x9475: 444, + 0x9476: 445, + 0x9477: 446, + 0x9478: 447, + 0x9479: 448, + 0x947D: 449, + 0x9481: 450, + 0x9482: 451, + 0x9485: 452, + 0x9489: 453, + 0x9491: 454, + 0x9493: 455, + 0x9495: 456, + 0x9496: 457, + 0x9497: 458, + 0x94A1: 459, + 0x94E1: 460, + 0x94E2: 461, + 0x94E3: 462, + 0x94E5: 463, + 0x94E8: 464, + 0x94E9: 465, + 0x94EB: 466, + 0x94EC: 467, + 0x94F1: 468, + 0x94F3: 469, + 0x94F5: 470, + 0x94F7: 471, + 0x94F9: 472, + 0x94FC: 473, + 0x9541: 474, + 0x9542: 475, + 0x9545: 476, + 0x9549: 477, + 0x9551: 478, + 0x9553: 479, + 0x9555: 480, + 0x9556: 481, + 0x9557: 482, + 0x9561: 483, + 0x9565: 484, + 0x9569: 485, + 0x9576: 486, + 0x9577: 487, + 0x9581: 488, + 0x9585: 489, + 0x95A1: 490, + 0x95A2: 491, + 0x95A5: 492, + 0x95A8: 493, + 0x95A9: 494, + 0x95AB: 495, + 0x95AD: 496, + 0x95B1: 497, + 0x95B3: 498, + 0x95B5: 499, + 0x95B7: 500, + 0x95B9: 501, + 0x95BB: 502, + 0x95C1: 503, + 0x95C5: 504, + 0x95C9: 505, + 0x95E1: 506, + 0x95F6: 507, + 0x9641: 508, + 0x9645: 509, + 0x9649: 510, + 0x9651: 511, + 0x9653: 512, + 0x9655: 513, + 0x9661: 514, + 0x9681: 515, + 0x9682: 516, + 0x9685: 517, + 0x9689: 518, + 0x9691: 519, + 0x9693: 520, + 0x9695: 521, + 0x9697: 522, + 0x96A1: 523, + 0x96B6: 524, + 0x96C1: 525, + 0x96D7: 526, + 0x96E1: 527, + 0x96E5: 528, + 0x96E9: 529, + 0x96F3: 530, + 0x96F5: 531, + 0x96F7: 532, + 0x9741: 533, + 0x9745: 534, + 0x9749: 535, + 0x9751: 536, + 0x9757: 537, + 0x9761: 538, + 0x9762: 539, + 0x9765: 540, + 0x9768: 541, + 0x9769: 542, + 0x976B: 543, + 0x9771: 544, + 0x9773: 545, + 0x9775: 546, + 0x9777: 547, + 0x9781: 548, + 0x97A1: 549, + 0x97A2: 550, + 0x97A5: 551, + 0x97A8: 552, + 0x97A9: 553, + 0x97B1: 554, + 0x97B3: 555, + 0x97B5: 556, + 0x97B6: 557, + 0x97B7: 558, + 0x97B8: 559, + 0x9861: 560, + 0x9862: 561, + 0x9865: 562, + 0x9869: 563, + 0x9871: 564, + 0x9873: 565, + 0x9875: 566, + 0x9876: 567, + 0x9877: 568, + 0x987D: 569, + 0x9881: 570, + 0x9882: 571, + 0x9885: 572, + 0x9889: 573, + 0x9891: 574, + 0x9893: 575, + 0x9895: 576, + 0x9896: 577, + 0x9897: 578, + 0x98E1: 579, + 0x98E2: 580, + 0x98E5: 581, + 0x98E9: 582, + 0x98EB: 583, + 0x98EC: 584, + 0x98F1: 585, + 0x98F3: 586, + 0x98F5: 587, + 0x98F6: 588, + 0x98F7: 589, + 0x98FD: 590, + 0x9941: 591, + 0x9942: 592, + 0x9945: 593, + 0x9949: 594, + 0x9951: 595, + 0x9953: 596, + 0x9955: 597, + 0x9956: 598, + 0x9957: 599, + 0x9961: 600, + 0x9976: 601, + 0x99A1: 602, + 0x99A2: 603, + 0x99A5: 604, + 0x99A9: 605, + 0x99B7: 606, + 0x99C1: 607, + 0x99C9: 608, + 0x99E1: 609, + 0x9A41: 610, + 0x9A45: 611, + 0x9A81: 612, + 0x9A82: 613, + 0x9A85: 614, + 0x9A89: 615, + 0x9A90: 616, + 0x9A91: 617, + 0x9A97: 618, + 0x9AC1: 619, + 0x9AE1: 620, + 0x9AE5: 621, + 0x9AE9: 622, + 
0x9AF1: 623, + 0x9AF3: 624, + 0x9AF7: 625, + 0x9B61: 626, + 0x9B62: 627, + 0x9B65: 628, + 0x9B68: 629, + 0x9B69: 630, + 0x9B71: 631, + 0x9B73: 632, + 0x9B75: 633, + 0x9B81: 634, + 0x9B85: 635, + 0x9B89: 636, + 0x9B91: 637, + 0x9B93: 638, + 0x9BA1: 639, + 0x9BA5: 640, + 0x9BA9: 641, + 0x9BB1: 642, + 0x9BB3: 643, + 0x9BB5: 644, + 0x9BB7: 645, + 0x9C61: 646, + 0x9C62: 647, + 0x9C65: 648, + 0x9C69: 649, + 0x9C71: 650, + 0x9C73: 651, + 0x9C75: 652, + 0x9C76: 653, + 0x9C77: 654, + 0x9C78: 655, + 0x9C7C: 656, + 0x9C7D: 657, + 0x9C81: 658, + 0x9C82: 659, + 0x9C85: 660, + 0x9C89: 661, + 0x9C91: 662, + 0x9C93: 663, + 0x9C95: 664, + 0x9C96: 665, + 0x9C97: 666, + 0x9CA1: 667, + 0x9CA2: 668, + 0x9CA5: 669, + 0x9CB5: 670, + 0x9CB7: 671, + 0x9CE1: 672, + 0x9CE2: 673, + 0x9CE5: 674, + 0x9CE9: 675, + 0x9CF1: 676, + 0x9CF3: 677, + 0x9CF5: 678, + 0x9CF6: 679, + 0x9CF7: 680, + 0x9CFD: 681, + 0x9D41: 682, + 0x9D42: 683, + 0x9D45: 684, + 0x9D49: 685, + 0x9D51: 686, + 0x9D53: 687, + 0x9D55: 688, + 0x9D57: 689, + 0x9D61: 690, + 0x9D62: 691, + 0x9D65: 692, + 0x9D69: 693, + 0x9D71: 694, + 0x9D73: 695, + 0x9D75: 696, + 0x9D76: 697, + 0x9D77: 698, + 0x9D81: 699, + 0x9D85: 700, + 0x9D93: 701, + 0x9D95: 702, + 0x9DA1: 703, + 0x9DA2: 704, + 0x9DA5: 705, + 0x9DA9: 706, + 0x9DB1: 707, + 0x9DB3: 708, + 0x9DB5: 709, + 0x9DB7: 710, + 0x9DC1: 711, + 0x9DC5: 712, + 0x9DD7: 713, + 0x9DF6: 714, + 0x9E41: 715, + 0x9E45: 716, + 0x9E49: 717, + 0x9E51: 718, + 0x9E53: 719, + 0x9E55: 720, + 0x9E57: 721, + 0x9E61: 722, + 0x9E65: 723, + 0x9E69: 724, + 0x9E73: 725, + 0x9E75: 726, + 0x9E77: 727, + 0x9E81: 728, + 0x9E82: 729, + 0x9E85: 730, + 0x9E89: 731, + 0x9E91: 732, + 0x9E93: 733, + 0x9E95: 734, + 0x9E97: 735, + 0x9EA1: 736, + 0x9EB6: 737, + 0x9EC1: 738, + 0x9EE1: 739, + 0x9EE2: 740, + 0x9EE5: 741, + 0x9EE9: 742, + 0x9EF1: 743, + 0x9EF5: 744, + 0x9EF7: 745, + 0x9F41: 746, + 0x9F42: 747, + 0x9F45: 748, + 0x9F49: 749, + 0x9F51: 750, + 0x9F53: 751, + 0x9F55: 752, + 0x9F57: 753, + 0x9F61: 754, + 0x9F62: 755, + 0x9F65: 756, + 0x9F69: 757, + 0x9F71: 758, + 0x9F73: 759, + 0x9F75: 760, + 0x9F77: 761, + 0x9F78: 762, + 0x9F7B: 763, + 0x9F7C: 764, + 0x9FA1: 765, + 0x9FA2: 766, + 0x9FA5: 767, + 0x9FA9: 768, + 0x9FB1: 769, + 0x9FB3: 770, + 0x9FB5: 771, + 0x9FB7: 772, + 0xA061: 773, + 0xA062: 774, + 0xA065: 775, + 0xA067: 776, + 0xA068: 777, + 0xA069: 778, + 0xA06A: 779, + 0xA06B: 780, + 0xA071: 781, + 0xA073: 782, + 0xA075: 783, + 0xA077: 784, + 0xA078: 785, + 0xA07B: 786, + 0xA07D: 787, + 0xA081: 788, + 0xA082: 789, + 0xA085: 790, + 0xA089: 791, + 0xA091: 792, + 0xA093: 793, + 0xA095: 794, + 0xA096: 795, + 0xA097: 796, + 0xA098: 797, + 0xA0A1: 798, + 0xA0A2: 799, + 0xA0A9: 800, + 0xA0B7: 801, + 0xA0E1: 802, + 0xA0E2: 803, + 0xA0E5: 804, + 0xA0E9: 805, + 0xA0EB: 806, + 0xA0F1: 807, + 0xA0F3: 808, + 0xA0F5: 809, + 0xA0F7: 810, + 0xA0F8: 811, + 0xA0FD: 812, + 0xA141: 813, + 0xA142: 814, + 0xA145: 815, + 0xA149: 816, + 0xA151: 817, + 0xA153: 818, + 0xA155: 819, + 0xA156: 820, + 0xA157: 821, + 0xA161: 822, + 0xA162: 823, + 0xA165: 824, + 0xA169: 825, + 0xA175: 826, + 0xA176: 827, + 0xA177: 828, + 0xA179: 829, + 0xA181: 830, + 0xA1A1: 831, + 0xA1A2: 832, + 0xA1A4: 833, + 0xA1A5: 834, + 0xA1A9: 835, + 0xA1AB: 836, + 0xA1B1: 837, + 0xA1B3: 838, + 0xA1B5: 839, + 0xA1B7: 840, + 0xA1C1: 841, + 0xA1C5: 842, + 0xA1D6: 843, + 0xA1D7: 844, + 0xA241: 845, + 0xA245: 846, + 0xA249: 847, + 0xA253: 848, + 0xA255: 849, + 0xA257: 850, + 0xA261: 851, + 0xA265: 852, + 0xA269: 853, + 0xA273: 854, + 0xA275: 855, + 0xA281: 856, + 0xA282: 857, + 0xA283: 858, + 0xA285: 859, + 
0xA288: 860, + 0xA289: 861, + 0xA28A: 862, + 0xA28B: 863, + 0xA291: 864, + 0xA293: 865, + 0xA295: 866, + 0xA297: 867, + 0xA29B: 868, + 0xA29D: 869, + 0xA2A1: 870, + 0xA2A5: 871, + 0xA2A9: 872, + 0xA2B3: 873, + 0xA2B5: 874, + 0xA2C1: 875, + 0xA2E1: 876, + 0xA2E5: 877, + 0xA2E9: 878, + 0xA341: 879, + 0xA345: 880, + 0xA349: 881, + 0xA351: 882, + 0xA355: 883, + 0xA361: 884, + 0xA365: 885, + 0xA369: 886, + 0xA371: 887, + 0xA375: 888, + 0xA3A1: 889, + 0xA3A2: 890, + 0xA3A5: 891, + 0xA3A8: 892, + 0xA3A9: 893, + 0xA3AB: 894, + 0xA3B1: 895, + 0xA3B3: 896, + 0xA3B5: 897, + 0xA3B6: 898, + 0xA3B7: 899, + 0xA3B9: 900, + 0xA3BB: 901, + 0xA461: 902, + 0xA462: 903, + 0xA463: 904, + 0xA464: 905, + 0xA465: 906, + 0xA468: 907, + 0xA469: 908, + 0xA46A: 909, + 0xA46B: 910, + 0xA46C: 911, + 0xA471: 912, + 0xA473: 913, + 0xA475: 914, + 0xA477: 915, + 0xA47B: 916, + 0xA481: 917, + 0xA482: 918, + 0xA485: 919, + 0xA489: 920, + 0xA491: 921, + 0xA493: 922, + 0xA495: 923, + 0xA496: 924, + 0xA497: 925, + 0xA49B: 926, + 0xA4A1: 927, + 0xA4A2: 928, + 0xA4A5: 929, + 0xA4B3: 930, + 0xA4E1: 931, + 0xA4E2: 932, + 0xA4E5: 933, + 0xA4E8: 934, + 0xA4E9: 935, + 0xA4EB: 936, + 0xA4F1: 937, + 0xA4F3: 938, + 0xA4F5: 939, + 0xA4F7: 940, + 0xA4F8: 941, + 0xA541: 942, + 0xA542: 943, + 0xA545: 944, + 0xA548: 945, + 0xA549: 946, + 0xA551: 947, + 0xA553: 948, + 0xA555: 949, + 0xA556: 950, + 0xA557: 951, + 0xA561: 952, + 0xA562: 953, + 0xA565: 954, + 0xA569: 955, + 0xA573: 956, + 0xA575: 957, + 0xA576: 958, + 0xA577: 959, + 0xA57B: 960, + 0xA581: 961, + 0xA585: 962, + 0xA5A1: 963, + 0xA5A2: 964, + 0xA5A3: 965, + 0xA5A5: 966, + 0xA5A9: 967, + 0xA5B1: 968, + 0xA5B3: 969, + 0xA5B5: 970, + 0xA5B7: 971, + 0xA5C1: 972, + 0xA5C5: 973, + 0xA5D6: 974, + 0xA5E1: 975, + 0xA5F6: 976, + 0xA641: 977, + 0xA642: 978, + 0xA645: 979, + 0xA649: 980, + 0xA651: 981, + 0xA653: 982, + 0xA661: 983, + 0xA665: 984, + 0xA681: 985, + 0xA682: 986, + 0xA685: 987, + 0xA688: 988, + 0xA689: 989, + 0xA68A: 990, + 0xA68B: 991, + 0xA691: 992, + 0xA693: 993, + 0xA695: 994, + 0xA697: 995, + 0xA69B: 996, + 0xA69C: 997, + 0xA6A1: 998, + 0xA6A9: 999, + 0xA6B6: 1000, + 0xA6C1: 1001, + 0xA6E1: 1002, + 0xA6E2: 1003, + 0xA6E5: 1004, + 0xA6E9: 1005, + 0xA6F7: 1006, + 0xA741: 1007, + 0xA745: 1008, + 0xA749: 1009, + 0xA751: 1010, + 0xA755: 1011, + 0xA757: 1012, + 0xA761: 1013, + 0xA762: 1014, + 0xA765: 1015, + 0xA769: 1016, + 0xA771: 1017, + 0xA773: 1018, + 0xA775: 1019, + 0xA7A1: 1020, + 0xA7A2: 1021, + 0xA7A5: 1022, + 0xA7A9: 1023, + 0xA7AB: 1024, + 0xA7B1: 1025, + 0xA7B3: 1026, + 0xA7B5: 1027, + 0xA7B7: 1028, + 0xA7B8: 1029, + 0xA7B9: 1030, + 0xA861: 1031, + 0xA862: 1032, + 0xA865: 1033, + 0xA869: 1034, + 0xA86B: 1035, + 0xA871: 1036, + 0xA873: 1037, + 0xA875: 1038, + 0xA876: 1039, + 0xA877: 1040, + 0xA87D: 1041, + 0xA881: 1042, + 0xA882: 1043, + 0xA885: 1044, + 0xA889: 1045, + 0xA891: 1046, + 0xA893: 1047, + 0xA895: 1048, + 0xA896: 1049, + 0xA897: 1050, + 0xA8A1: 1051, + 0xA8A2: 1052, + 0xA8B1: 1053, + 0xA8E1: 1054, + 0xA8E2: 1055, + 0xA8E5: 1056, + 0xA8E8: 1057, + 0xA8E9: 1058, + 0xA8F1: 1059, + 0xA8F5: 1060, + 0xA8F6: 1061, + 0xA8F7: 1062, + 0xA941: 1063, + 0xA957: 1064, + 0xA961: 1065, + 0xA962: 1066, + 0xA971: 1067, + 0xA973: 1068, + 0xA975: 1069, + 0xA976: 1070, + 0xA977: 1071, + 0xA9A1: 1072, + 0xA9A2: 1073, + 0xA9A5: 1074, + 0xA9A9: 1075, + 0xA9B1: 1076, + 0xA9B3: 1077, + 0xA9B7: 1078, + 0xAA41: 1079, + 0xAA61: 1080, + 0xAA77: 1081, + 0xAA81: 1082, + 0xAA82: 1083, + 0xAA85: 1084, + 0xAA89: 1085, + 0xAA91: 1086, + 0xAA95: 1087, + 0xAA97: 1088, + 0xAB41: 1089, + 0xAB57: 1090, 
+ 0xAB61: 1091, + 0xAB65: 1092, + 0xAB69: 1093, + 0xAB71: 1094, + 0xAB73: 1095, + 0xABA1: 1096, + 0xABA2: 1097, + 0xABA5: 1098, + 0xABA9: 1099, + 0xABB1: 1100, + 0xABB3: 1101, + 0xABB5: 1102, + 0xABB7: 1103, + 0xAC61: 1104, + 0xAC62: 1105, + 0xAC64: 1106, + 0xAC65: 1107, + 0xAC68: 1108, + 0xAC69: 1109, + 0xAC6A: 1110, + 0xAC6B: 1111, + 0xAC71: 1112, + 0xAC73: 1113, + 0xAC75: 1114, + 0xAC76: 1115, + 0xAC77: 1116, + 0xAC7B: 1117, + 0xAC81: 1118, + 0xAC82: 1119, + 0xAC85: 1120, + 0xAC89: 1121, + 0xAC91: 1122, + 0xAC93: 1123, + 0xAC95: 1124, + 0xAC96: 1125, + 0xAC97: 1126, + 0xACA1: 1127, + 0xACA2: 1128, + 0xACA5: 1129, + 0xACA9: 1130, + 0xACB1: 1131, + 0xACB3: 1132, + 0xACB5: 1133, + 0xACB7: 1134, + 0xACC1: 1135, + 0xACC5: 1136, + 0xACC9: 1137, + 0xACD1: 1138, + 0xACD7: 1139, + 0xACE1: 1140, + 0xACE2: 1141, + 0xACE3: 1142, + 0xACE4: 1143, + 0xACE5: 1144, + 0xACE8: 1145, + 0xACE9: 1146, + 0xACEB: 1147, + 0xACEC: 1148, + 0xACF1: 1149, + 0xACF3: 1150, + 0xACF5: 1151, + 0xACF6: 1152, + 0xACF7: 1153, + 0xACFC: 1154, + 0xAD41: 1155, + 0xAD42: 1156, + 0xAD45: 1157, + 0xAD49: 1158, + 0xAD51: 1159, + 0xAD53: 1160, + 0xAD55: 1161, + 0xAD56: 1162, + 0xAD57: 1163, + 0xAD61: 1164, + 0xAD62: 1165, + 0xAD65: 1166, + 0xAD69: 1167, + 0xAD71: 1168, + 0xAD73: 1169, + 0xAD75: 1170, + 0xAD76: 1171, + 0xAD77: 1172, + 0xAD81: 1173, + 0xAD85: 1174, + 0xAD89: 1175, + 0xAD97: 1176, + 0xADA1: 1177, + 0xADA2: 1178, + 0xADA3: 1179, + 0xADA5: 1180, + 0xADA9: 1181, + 0xADAB: 1182, + 0xADB1: 1183, + 0xADB3: 1184, + 0xADB5: 1185, + 0xADB7: 1186, + 0xADBB: 1187, + 0xADC1: 1188, + 0xADC2: 1189, + 0xADC5: 1190, + 0xADC9: 1191, + 0xADD7: 1192, + 0xADE1: 1193, + 0xADE5: 1194, + 0xADE9: 1195, + 0xADF1: 1196, + 0xADF5: 1197, + 0xADF6: 1198, + 0xAE41: 1199, + 0xAE45: 1200, + 0xAE49: 1201, + 0xAE51: 1202, + 0xAE53: 1203, + 0xAE55: 1204, + 0xAE61: 1205, + 0xAE62: 1206, + 0xAE65: 1207, + 0xAE69: 1208, + 0xAE71: 1209, + 0xAE73: 1210, + 0xAE75: 1211, + 0xAE77: 1212, + 0xAE81: 1213, + 0xAE82: 1214, + 0xAE85: 1215, + 0xAE88: 1216, + 0xAE89: 1217, + 0xAE91: 1218, + 0xAE93: 1219, + 0xAE95: 1220, + 0xAE97: 1221, + 0xAE99: 1222, + 0xAE9B: 1223, + 0xAE9C: 1224, + 0xAEA1: 1225, + 0xAEB6: 1226, + 0xAEC1: 1227, + 0xAEC2: 1228, + 0xAEC5: 1229, + 0xAEC9: 1230, + 0xAED1: 1231, + 0xAED7: 1232, + 0xAEE1: 1233, + 0xAEE2: 1234, + 0xAEE5: 1235, + 0xAEE9: 1236, + 0xAEF1: 1237, + 0xAEF3: 1238, + 0xAEF5: 1239, + 0xAEF7: 1240, + 0xAF41: 1241, + 0xAF42: 1242, + 0xAF49: 1243, + 0xAF51: 1244, + 0xAF55: 1245, + 0xAF57: 1246, + 0xAF61: 1247, + 0xAF62: 1248, + 0xAF65: 1249, + 0xAF69: 1250, + 0xAF6A: 1251, + 0xAF71: 1252, + 0xAF73: 1253, + 0xAF75: 1254, + 0xAF77: 1255, + 0xAFA1: 1256, + 0xAFA2: 1257, + 0xAFA5: 1258, + 0xAFA8: 1259, + 0xAFA9: 1260, + 0xAFB0: 1261, + 0xAFB1: 1262, + 0xAFB3: 1263, + 0xAFB5: 1264, + 0xAFB7: 1265, + 0xAFBC: 1266, + 0xB061: 1267, + 0xB062: 1268, + 0xB064: 1269, + 0xB065: 1270, + 0xB069: 1271, + 0xB071: 1272, + 0xB073: 1273, + 0xB076: 1274, + 0xB077: 1275, + 0xB07D: 1276, + 0xB081: 1277, + 0xB082: 1278, + 0xB085: 1279, + 0xB089: 1280, + 0xB091: 1281, + 0xB093: 1282, + 0xB096: 1283, + 0xB097: 1284, + 0xB0B7: 1285, + 0xB0E1: 1286, + 0xB0E2: 1287, + 0xB0E5: 1288, + 0xB0E9: 1289, + 0xB0EB: 1290, + 0xB0F1: 1291, + 0xB0F3: 1292, + 0xB0F6: 1293, + 0xB0F7: 1294, + 0xB141: 1295, + 0xB145: 1296, + 0xB149: 1297, + 0xB185: 1298, + 0xB1A1: 1299, + 0xB1A2: 1300, + 0xB1A5: 1301, + 0xB1A8: 1302, + 0xB1A9: 1303, + 0xB1AB: 1304, + 0xB1B1: 1305, + 0xB1B3: 1306, + 0xB1B7: 1307, + 0xB1C1: 1308, + 0xB1C2: 1309, + 0xB1C5: 1310, + 0xB1D6: 1311, + 0xB1E1: 1312, + 
0xB1F6: 1313, + 0xB241: 1314, + 0xB245: 1315, + 0xB249: 1316, + 0xB251: 1317, + 0xB253: 1318, + 0xB261: 1319, + 0xB281: 1320, + 0xB282: 1321, + 0xB285: 1322, + 0xB289: 1323, + 0xB291: 1324, + 0xB293: 1325, + 0xB297: 1326, + 0xB2A1: 1327, + 0xB2B6: 1328, + 0xB2C1: 1329, + 0xB2E1: 1330, + 0xB2E5: 1331, + 0xB357: 1332, + 0xB361: 1333, + 0xB362: 1334, + 0xB365: 1335, + 0xB369: 1336, + 0xB36B: 1337, + 0xB370: 1338, + 0xB371: 1339, + 0xB373: 1340, + 0xB381: 1341, + 0xB385: 1342, + 0xB389: 1343, + 0xB391: 1344, + 0xB3A1: 1345, + 0xB3A2: 1346, + 0xB3A5: 1347, + 0xB3A9: 1348, + 0xB3B1: 1349, + 0xB3B3: 1350, + 0xB3B5: 1351, + 0xB3B7: 1352, + 0xB461: 1353, + 0xB462: 1354, + 0xB465: 1355, + 0xB466: 1356, + 0xB467: 1357, + 0xB469: 1358, + 0xB46A: 1359, + 0xB46B: 1360, + 0xB470: 1361, + 0xB471: 1362, + 0xB473: 1363, + 0xB475: 1364, + 0xB476: 1365, + 0xB477: 1366, + 0xB47B: 1367, + 0xB47C: 1368, + 0xB481: 1369, + 0xB482: 1370, + 0xB485: 1371, + 0xB489: 1372, + 0xB491: 1373, + 0xB493: 1374, + 0xB495: 1375, + 0xB496: 1376, + 0xB497: 1377, + 0xB4A1: 1378, + 0xB4A2: 1379, + 0xB4A5: 1380, + 0xB4A9: 1381, + 0xB4AC: 1382, + 0xB4B1: 1383, + 0xB4B3: 1384, + 0xB4B5: 1385, + 0xB4B7: 1386, + 0xB4BB: 1387, + 0xB4BD: 1388, + 0xB4C1: 1389, + 0xB4C5: 1390, + 0xB4C9: 1391, + 0xB4D3: 1392, + 0xB4E1: 1393, + 0xB4E2: 1394, + 0xB4E5: 1395, + 0xB4E6: 1396, + 0xB4E8: 1397, + 0xB4E9: 1398, + 0xB4EA: 1399, + 0xB4EB: 1400, + 0xB4F1: 1401, + 0xB4F3: 1402, + 0xB4F4: 1403, + 0xB4F5: 1404, + 0xB4F6: 1405, + 0xB4F7: 1406, + 0xB4F8: 1407, + 0xB4FA: 1408, + 0xB4FC: 1409, + 0xB541: 1410, + 0xB542: 1411, + 0xB545: 1412, + 0xB549: 1413, + 0xB551: 1414, + 0xB553: 1415, + 0xB555: 1416, + 0xB557: 1417, + 0xB561: 1418, + 0xB562: 1419, + 0xB563: 1420, + 0xB565: 1421, + 0xB569: 1422, + 0xB56B: 1423, + 0xB56C: 1424, + 0xB571: 1425, + 0xB573: 1426, + 0xB574: 1427, + 0xB575: 1428, + 0xB576: 1429, + 0xB577: 1430, + 0xB57B: 1431, + 0xB57C: 1432, + 0xB57D: 1433, + 0xB581: 1434, + 0xB585: 1435, + 0xB589: 1436, + 0xB591: 1437, + 0xB593: 1438, + 0xB595: 1439, + 0xB596: 1440, + 0xB5A1: 1441, + 0xB5A2: 1442, + 0xB5A5: 1443, + 0xB5A9: 1444, + 0xB5AA: 1445, + 0xB5AB: 1446, + 0xB5AD: 1447, + 0xB5B0: 1448, + 0xB5B1: 1449, + 0xB5B3: 1450, + 0xB5B5: 1451, + 0xB5B7: 1452, + 0xB5B9: 1453, + 0xB5C1: 1454, + 0xB5C2: 1455, + 0xB5C5: 1456, + 0xB5C9: 1457, + 0xB5D1: 1458, + 0xB5D3: 1459, + 0xB5D5: 1460, + 0xB5D6: 1461, + 0xB5D7: 1462, + 0xB5E1: 1463, + 0xB5E2: 1464, + 0xB5E5: 1465, + 0xB5F1: 1466, + 0xB5F5: 1467, + 0xB5F7: 1468, + 0xB641: 1469, + 0xB642: 1470, + 0xB645: 1471, + 0xB649: 1472, + 0xB651: 1473, + 0xB653: 1474, + 0xB655: 1475, + 0xB657: 1476, + 0xB661: 1477, + 0xB662: 1478, + 0xB665: 1479, + 0xB669: 1480, + 0xB671: 1481, + 0xB673: 1482, + 0xB675: 1483, + 0xB677: 1484, + 0xB681: 1485, + 0xB682: 1486, + 0xB685: 1487, + 0xB689: 1488, + 0xB68A: 1489, + 0xB68B: 1490, + 0xB691: 1491, + 0xB693: 1492, + 0xB695: 1493, + 0xB697: 1494, + 0xB6A1: 1495, + 0xB6A2: 1496, + 0xB6A5: 1497, + 0xB6A9: 1498, + 0xB6B1: 1499, + 0xB6B3: 1500, + 0xB6B6: 1501, + 0xB6B7: 1502, + 0xB6C1: 1503, + 0xB6C2: 1504, + 0xB6C5: 1505, + 0xB6C9: 1506, + 0xB6D1: 1507, + 0xB6D3: 1508, + 0xB6D7: 1509, + 0xB6E1: 1510, + 0xB6E2: 1511, + 0xB6E5: 1512, + 0xB6E9: 1513, + 0xB6F1: 1514, + 0xB6F3: 1515, + 0xB6F5: 1516, + 0xB6F7: 1517, + 0xB741: 1518, + 0xB742: 1519, + 0xB745: 1520, + 0xB749: 1521, + 0xB751: 1522, + 0xB753: 1523, + 0xB755: 1524, + 0xB757: 1525, + 0xB759: 1526, + 0xB761: 1527, + 0xB762: 1528, + 0xB765: 1529, + 0xB769: 1530, + 0xB76F: 1531, + 0xB771: 1532, + 0xB773: 1533, + 0xB775: 1534, + 
0xB777: 1535, + 0xB778: 1536, + 0xB779: 1537, + 0xB77A: 1538, + 0xB77B: 1539, + 0xB77C: 1540, + 0xB77D: 1541, + 0xB781: 1542, + 0xB785: 1543, + 0xB789: 1544, + 0xB791: 1545, + 0xB795: 1546, + 0xB7A1: 1547, + 0xB7A2: 1548, + 0xB7A5: 1549, + 0xB7A9: 1550, + 0xB7AA: 1551, + 0xB7AB: 1552, + 0xB7B0: 1553, + 0xB7B1: 1554, + 0xB7B3: 1555, + 0xB7B5: 1556, + 0xB7B6: 1557, + 0xB7B7: 1558, + 0xB7B8: 1559, + 0xB7BC: 1560, + 0xB861: 1561, + 0xB862: 1562, + 0xB865: 1563, + 0xB867: 1564, + 0xB868: 1565, + 0xB869: 1566, + 0xB86B: 1567, + 0xB871: 1568, + 0xB873: 1569, + 0xB875: 1570, + 0xB876: 1571, + 0xB877: 1572, + 0xB878: 1573, + 0xB881: 1574, + 0xB882: 1575, + 0xB885: 1576, + 0xB889: 1577, + 0xB891: 1578, + 0xB893: 1579, + 0xB895: 1580, + 0xB896: 1581, + 0xB897: 1582, + 0xB8A1: 1583, + 0xB8A2: 1584, + 0xB8A5: 1585, + 0xB8A7: 1586, + 0xB8A9: 1587, + 0xB8B1: 1588, + 0xB8B7: 1589, + 0xB8C1: 1590, + 0xB8C5: 1591, + 0xB8C9: 1592, + 0xB8E1: 1593, + 0xB8E2: 1594, + 0xB8E5: 1595, + 0xB8E9: 1596, + 0xB8EB: 1597, + 0xB8F1: 1598, + 0xB8F3: 1599, + 0xB8F5: 1600, + 0xB8F7: 1601, + 0xB8F8: 1602, + 0xB941: 1603, + 0xB942: 1604, + 0xB945: 1605, + 0xB949: 1606, + 0xB951: 1607, + 0xB953: 1608, + 0xB955: 1609, + 0xB957: 1610, + 0xB961: 1611, + 0xB965: 1612, + 0xB969: 1613, + 0xB971: 1614, + 0xB973: 1615, + 0xB976: 1616, + 0xB977: 1617, + 0xB981: 1618, + 0xB9A1: 1619, + 0xB9A2: 1620, + 0xB9A5: 1621, + 0xB9A9: 1622, + 0xB9AB: 1623, + 0xB9B1: 1624, + 0xB9B3: 1625, + 0xB9B5: 1626, + 0xB9B7: 1627, + 0xB9B8: 1628, + 0xB9B9: 1629, + 0xB9BD: 1630, + 0xB9C1: 1631, + 0xB9C2: 1632, + 0xB9C9: 1633, + 0xB9D3: 1634, + 0xB9D5: 1635, + 0xB9D7: 1636, + 0xB9E1: 1637, + 0xB9F6: 1638, + 0xB9F7: 1639, + 0xBA41: 1640, + 0xBA45: 1641, + 0xBA49: 1642, + 0xBA51: 1643, + 0xBA53: 1644, + 0xBA55: 1645, + 0xBA57: 1646, + 0xBA61: 1647, + 0xBA62: 1648, + 0xBA65: 1649, + 0xBA77: 1650, + 0xBA81: 1651, + 0xBA82: 1652, + 0xBA85: 1653, + 0xBA89: 1654, + 0xBA8A: 1655, + 0xBA8B: 1656, + 0xBA91: 1657, + 0xBA93: 1658, + 0xBA95: 1659, + 0xBA97: 1660, + 0xBAA1: 1661, + 0xBAB6: 1662, + 0xBAC1: 1663, + 0xBAE1: 1664, + 0xBAE2: 1665, + 0xBAE5: 1666, + 0xBAE9: 1667, + 0xBAF1: 1668, + 0xBAF3: 1669, + 0xBAF5: 1670, + 0xBB41: 1671, + 0xBB45: 1672, + 0xBB49: 1673, + 0xBB51: 1674, + 0xBB61: 1675, + 0xBB62: 1676, + 0xBB65: 1677, + 0xBB69: 1678, + 0xBB71: 1679, + 0xBB73: 1680, + 0xBB75: 1681, + 0xBB77: 1682, + 0xBBA1: 1683, + 0xBBA2: 1684, + 0xBBA5: 1685, + 0xBBA8: 1686, + 0xBBA9: 1687, + 0xBBAB: 1688, + 0xBBB1: 1689, + 0xBBB3: 1690, + 0xBBB5: 1691, + 0xBBB7: 1692, + 0xBBB8: 1693, + 0xBBBB: 1694, + 0xBBBC: 1695, + 0xBC61: 1696, + 0xBC62: 1697, + 0xBC65: 1698, + 0xBC67: 1699, + 0xBC69: 1700, + 0xBC6C: 1701, + 0xBC71: 1702, + 0xBC73: 1703, + 0xBC75: 1704, + 0xBC76: 1705, + 0xBC77: 1706, + 0xBC81: 1707, + 0xBC82: 1708, + 0xBC85: 1709, + 0xBC89: 1710, + 0xBC91: 1711, + 0xBC93: 1712, + 0xBC95: 1713, + 0xBC96: 1714, + 0xBC97: 1715, + 0xBCA1: 1716, + 0xBCA5: 1717, + 0xBCB7: 1718, + 0xBCE1: 1719, + 0xBCE2: 1720, + 0xBCE5: 1721, + 0xBCE9: 1722, + 0xBCF1: 1723, + 0xBCF3: 1724, + 0xBCF5: 1725, + 0xBCF6: 1726, + 0xBCF7: 1727, + 0xBD41: 1728, + 0xBD57: 1729, + 0xBD61: 1730, + 0xBD76: 1731, + 0xBDA1: 1732, + 0xBDA2: 1733, + 0xBDA5: 1734, + 0xBDA9: 1735, + 0xBDB1: 1736, + 0xBDB3: 1737, + 0xBDB5: 1738, + 0xBDB7: 1739, + 0xBDB9: 1740, + 0xBDC1: 1741, + 0xBDC2: 1742, + 0xBDC9: 1743, + 0xBDD6: 1744, + 0xBDE1: 1745, + 0xBDF6: 1746, + 0xBE41: 1747, + 0xBE45: 1748, + 0xBE49: 1749, + 0xBE51: 1750, + 0xBE53: 1751, + 0xBE77: 1752, + 0xBE81: 1753, + 0xBE82: 1754, + 0xBE85: 1755, + 0xBE89: 1756, + 
0xBE91: 1757, + 0xBE93: 1758, + 0xBE97: 1759, + 0xBEA1: 1760, + 0xBEB6: 1761, + 0xBEB7: 1762, + 0xBEE1: 1763, + 0xBF41: 1764, + 0xBF61: 1765, + 0xBF71: 1766, + 0xBF75: 1767, + 0xBF77: 1768, + 0xBFA1: 1769, + 0xBFA2: 1770, + 0xBFA5: 1771, + 0xBFA9: 1772, + 0xBFB1: 1773, + 0xBFB3: 1774, + 0xBFB7: 1775, + 0xBFB8: 1776, + 0xBFBD: 1777, + 0xC061: 1778, + 0xC062: 1779, + 0xC065: 1780, + 0xC067: 1781, + 0xC069: 1782, + 0xC071: 1783, + 0xC073: 1784, + 0xC075: 1785, + 0xC076: 1786, + 0xC077: 1787, + 0xC078: 1788, + 0xC081: 1789, + 0xC082: 1790, + 0xC085: 1791, + 0xC089: 1792, + 0xC091: 1793, + 0xC093: 1794, + 0xC095: 1795, + 0xC096: 1796, + 0xC097: 1797, + 0xC0A1: 1798, + 0xC0A5: 1799, + 0xC0A7: 1800, + 0xC0A9: 1801, + 0xC0B1: 1802, + 0xC0B7: 1803, + 0xC0E1: 1804, + 0xC0E2: 1805, + 0xC0E5: 1806, + 0xC0E9: 1807, + 0xC0F1: 1808, + 0xC0F3: 1809, + 0xC0F5: 1810, + 0xC0F6: 1811, + 0xC0F7: 1812, + 0xC141: 1813, + 0xC142: 1814, + 0xC145: 1815, + 0xC149: 1816, + 0xC151: 1817, + 0xC153: 1818, + 0xC155: 1819, + 0xC157: 1820, + 0xC161: 1821, + 0xC165: 1822, + 0xC176: 1823, + 0xC181: 1824, + 0xC185: 1825, + 0xC197: 1826, + 0xC1A1: 1827, + 0xC1A2: 1828, + 0xC1A5: 1829, + 0xC1A9: 1830, + 0xC1B1: 1831, + 0xC1B3: 1832, + 0xC1B5: 1833, + 0xC1B7: 1834, + 0xC1C1: 1835, + 0xC1C5: 1836, + 0xC1C9: 1837, + 0xC1D7: 1838, + 0xC241: 1839, + 0xC245: 1840, + 0xC249: 1841, + 0xC251: 1842, + 0xC253: 1843, + 0xC255: 1844, + 0xC257: 1845, + 0xC261: 1846, + 0xC271: 1847, + 0xC281: 1848, + 0xC282: 1849, + 0xC285: 1850, + 0xC289: 1851, + 0xC291: 1852, + 0xC293: 1853, + 0xC295: 1854, + 0xC297: 1855, + 0xC2A1: 1856, + 0xC2B6: 1857, + 0xC2C1: 1858, + 0xC2C5: 1859, + 0xC2E1: 1860, + 0xC2E5: 1861, + 0xC2E9: 1862, + 0xC2F1: 1863, + 0xC2F3: 1864, + 0xC2F5: 1865, + 0xC2F7: 1866, + 0xC341: 1867, + 0xC345: 1868, + 0xC349: 1869, + 0xC351: 1870, + 0xC357: 1871, + 0xC361: 1872, + 0xC362: 1873, + 0xC365: 1874, + 0xC369: 1875, + 0xC371: 1876, + 0xC373: 1877, + 0xC375: 1878, + 0xC377: 1879, + 0xC3A1: 1880, + 0xC3A2: 1881, + 0xC3A5: 1882, + 0xC3A8: 1883, + 0xC3A9: 1884, + 0xC3AA: 1885, + 0xC3B1: 1886, + 0xC3B3: 1887, + 0xC3B5: 1888, + 0xC3B7: 1889, + 0xC461: 1890, + 0xC462: 1891, + 0xC465: 1892, + 0xC469: 1893, + 0xC471: 1894, + 0xC473: 1895, + 0xC475: 1896, + 0xC477: 1897, + 0xC481: 1898, + 0xC482: 1899, + 0xC485: 1900, + 0xC489: 1901, + 0xC491: 1902, + 0xC493: 1903, + 0xC495: 1904, + 0xC496: 1905, + 0xC497: 1906, + 0xC4A1: 1907, + 0xC4A2: 1908, + 0xC4B7: 1909, + 0xC4E1: 1910, + 0xC4E2: 1911, + 0xC4E5: 1912, + 0xC4E8: 1913, + 0xC4E9: 1914, + 0xC4F1: 1915, + 0xC4F3: 1916, + 0xC4F5: 1917, + 0xC4F6: 1918, + 0xC4F7: 1919, + 0xC541: 1920, + 0xC542: 1921, + 0xC545: 1922, + 0xC549: 1923, + 0xC551: 1924, + 0xC553: 1925, + 0xC555: 1926, + 0xC557: 1927, + 0xC561: 1928, + 0xC565: 1929, + 0xC569: 1930, + 0xC571: 1931, + 0xC573: 1932, + 0xC575: 1933, + 0xC576: 1934, + 0xC577: 1935, + 0xC581: 1936, + 0xC5A1: 1937, + 0xC5A2: 1938, + 0xC5A5: 1939, + 0xC5A9: 1940, + 0xC5B1: 1941, + 0xC5B3: 1942, + 0xC5B5: 1943, + 0xC5B7: 1944, + 0xC5C1: 1945, + 0xC5C2: 1946, + 0xC5C5: 1947, + 0xC5C9: 1948, + 0xC5D1: 1949, + 0xC5D7: 1950, + 0xC5E1: 1951, + 0xC5F7: 1952, + 0xC641: 1953, + 0xC649: 1954, + 0xC661: 1955, + 0xC681: 1956, + 0xC682: 1957, + 0xC685: 1958, + 0xC689: 1959, + 0xC691: 1960, + 0xC693: 1961, + 0xC695: 1962, + 0xC697: 1963, + 0xC6A1: 1964, + 0xC6A5: 1965, + 0xC6A9: 1966, + 0xC6B7: 1967, + 0xC6C1: 1968, + 0xC6D7: 1969, + 0xC6E1: 1970, + 0xC6E2: 1971, + 0xC6E5: 1972, + 0xC6E9: 1973, + 0xC6F1: 1974, + 0xC6F3: 1975, + 0xC6F5: 1976, + 0xC6F7: 1977, + 0xC741: 1978, + 
0xC745: 1979, + 0xC749: 1980, + 0xC751: 1981, + 0xC761: 1982, + 0xC762: 1983, + 0xC765: 1984, + 0xC769: 1985, + 0xC771: 1986, + 0xC773: 1987, + 0xC777: 1988, + 0xC7A1: 1989, + 0xC7A2: 1990, + 0xC7A5: 1991, + 0xC7A9: 1992, + 0xC7B1: 1993, + 0xC7B3: 1994, + 0xC7B5: 1995, + 0xC7B7: 1996, + 0xC861: 1997, + 0xC862: 1998, + 0xC865: 1999, + 0xC869: 2000, + 0xC86A: 2001, + 0xC871: 2002, + 0xC873: 2003, + 0xC875: 2004, + 0xC876: 2005, + 0xC877: 2006, + 0xC881: 2007, + 0xC882: 2008, + 0xC885: 2009, + 0xC889: 2010, + 0xC891: 2011, + 0xC893: 2012, + 0xC895: 2013, + 0xC896: 2014, + 0xC897: 2015, + 0xC8A1: 2016, + 0xC8B7: 2017, + 0xC8E1: 2018, + 0xC8E2: 2019, + 0xC8E5: 2020, + 0xC8E9: 2021, + 0xC8EB: 2022, + 0xC8F1: 2023, + 0xC8F3: 2024, + 0xC8F5: 2025, + 0xC8F6: 2026, + 0xC8F7: 2027, + 0xC941: 2028, + 0xC942: 2029, + 0xC945: 2030, + 0xC949: 2031, + 0xC951: 2032, + 0xC953: 2033, + 0xC955: 2034, + 0xC957: 2035, + 0xC961: 2036, + 0xC965: 2037, + 0xC976: 2038, + 0xC981: 2039, + 0xC985: 2040, + 0xC9A1: 2041, + 0xC9A2: 2042, + 0xC9A5: 2043, + 0xC9A9: 2044, + 0xC9B1: 2045, + 0xC9B3: 2046, + 0xC9B5: 2047, + 0xC9B7: 2048, + 0xC9BC: 2049, + 0xC9C1: 2050, + 0xC9C5: 2051, + 0xC9E1: 2052, + 0xCA41: 2053, + 0xCA45: 2054, + 0xCA55: 2055, + 0xCA57: 2056, + 0xCA61: 2057, + 0xCA81: 2058, + 0xCA82: 2059, + 0xCA85: 2060, + 0xCA89: 2061, + 0xCA91: 2062, + 0xCA93: 2063, + 0xCA95: 2064, + 0xCA97: 2065, + 0xCAA1: 2066, + 0xCAB6: 2067, + 0xCAC1: 2068, + 0xCAE1: 2069, + 0xCAE2: 2070, + 0xCAE5: 2071, + 0xCAE9: 2072, + 0xCAF1: 2073, + 0xCAF3: 2074, + 0xCAF7: 2075, + 0xCB41: 2076, + 0xCB45: 2077, + 0xCB49: 2078, + 0xCB51: 2079, + 0xCB57: 2080, + 0xCB61: 2081, + 0xCB62: 2082, + 0xCB65: 2083, + 0xCB68: 2084, + 0xCB69: 2085, + 0xCB6B: 2086, + 0xCB71: 2087, + 0xCB73: 2088, + 0xCB75: 2089, + 0xCB81: 2090, + 0xCB85: 2091, + 0xCB89: 2092, + 0xCB91: 2093, + 0xCB93: 2094, + 0xCBA1: 2095, + 0xCBA2: 2096, + 0xCBA5: 2097, + 0xCBA9: 2098, + 0xCBB1: 2099, + 0xCBB3: 2100, + 0xCBB5: 2101, + 0xCBB7: 2102, + 0xCC61: 2103, + 0xCC62: 2104, + 0xCC63: 2105, + 0xCC65: 2106, + 0xCC69: 2107, + 0xCC6B: 2108, + 0xCC71: 2109, + 0xCC73: 2110, + 0xCC75: 2111, + 0xCC76: 2112, + 0xCC77: 2113, + 0xCC7B: 2114, + 0xCC81: 2115, + 0xCC82: 2116, + 0xCC85: 2117, + 0xCC89: 2118, + 0xCC91: 2119, + 0xCC93: 2120, + 0xCC95: 2121, + 0xCC96: 2122, + 0xCC97: 2123, + 0xCCA1: 2124, + 0xCCA2: 2125, + 0xCCE1: 2126, + 0xCCE2: 2127, + 0xCCE5: 2128, + 0xCCE9: 2129, + 0xCCF1: 2130, + 0xCCF3: 2131, + 0xCCF5: 2132, + 0xCCF6: 2133, + 0xCCF7: 2134, + 0xCD41: 2135, + 0xCD42: 2136, + 0xCD45: 2137, + 0xCD49: 2138, + 0xCD51: 2139, + 0xCD53: 2140, + 0xCD55: 2141, + 0xCD57: 2142, + 0xCD61: 2143, + 0xCD65: 2144, + 0xCD69: 2145, + 0xCD71: 2146, + 0xCD73: 2147, + 0xCD76: 2148, + 0xCD77: 2149, + 0xCD81: 2150, + 0xCD89: 2151, + 0xCD93: 2152, + 0xCD95: 2153, + 0xCDA1: 2154, + 0xCDA2: 2155, + 0xCDA5: 2156, + 0xCDA9: 2157, + 0xCDB1: 2158, + 0xCDB3: 2159, + 0xCDB5: 2160, + 0xCDB7: 2161, + 0xCDC1: 2162, + 0xCDD7: 2163, + 0xCE41: 2164, + 0xCE45: 2165, + 0xCE61: 2166, + 0xCE65: 2167, + 0xCE69: 2168, + 0xCE73: 2169, + 0xCE75: 2170, + 0xCE81: 2171, + 0xCE82: 2172, + 0xCE85: 2173, + 0xCE88: 2174, + 0xCE89: 2175, + 0xCE8B: 2176, + 0xCE91: 2177, + 0xCE93: 2178, + 0xCE95: 2179, + 0xCE97: 2180, + 0xCEA1: 2181, + 0xCEB7: 2182, + 0xCEE1: 2183, + 0xCEE5: 2184, + 0xCEE9: 2185, + 0xCEF1: 2186, + 0xCEF5: 2187, + 0xCF41: 2188, + 0xCF45: 2189, + 0xCF49: 2190, + 0xCF51: 2191, + 0xCF55: 2192, + 0xCF57: 2193, + 0xCF61: 2194, + 0xCF65: 2195, + 0xCF69: 2196, + 0xCF71: 2197, + 0xCF73: 2198, + 0xCF75: 2199, + 0xCFA1: 2200, + 
0xCFA2: 2201, + 0xCFA5: 2202, + 0xCFA9: 2203, + 0xCFB1: 2204, + 0xCFB3: 2205, + 0xCFB5: 2206, + 0xCFB7: 2207, + 0xD061: 2208, + 0xD062: 2209, + 0xD065: 2210, + 0xD069: 2211, + 0xD06E: 2212, + 0xD071: 2213, + 0xD073: 2214, + 0xD075: 2215, + 0xD077: 2216, + 0xD081: 2217, + 0xD082: 2218, + 0xD085: 2219, + 0xD089: 2220, + 0xD091: 2221, + 0xD093: 2222, + 0xD095: 2223, + 0xD096: 2224, + 0xD097: 2225, + 0xD0A1: 2226, + 0xD0B7: 2227, + 0xD0E1: 2228, + 0xD0E2: 2229, + 0xD0E5: 2230, + 0xD0E9: 2231, + 0xD0EB: 2232, + 0xD0F1: 2233, + 0xD0F3: 2234, + 0xD0F5: 2235, + 0xD0F7: 2236, + 0xD141: 2237, + 0xD142: 2238, + 0xD145: 2239, + 0xD149: 2240, + 0xD151: 2241, + 0xD153: 2242, + 0xD155: 2243, + 0xD157: 2244, + 0xD161: 2245, + 0xD162: 2246, + 0xD165: 2247, + 0xD169: 2248, + 0xD171: 2249, + 0xD173: 2250, + 0xD175: 2251, + 0xD176: 2252, + 0xD177: 2253, + 0xD181: 2254, + 0xD185: 2255, + 0xD189: 2256, + 0xD193: 2257, + 0xD1A1: 2258, + 0xD1A2: 2259, + 0xD1A5: 2260, + 0xD1A9: 2261, + 0xD1AE: 2262, + 0xD1B1: 2263, + 0xD1B3: 2264, + 0xD1B5: 2265, + 0xD1B7: 2266, + 0xD1BB: 2267, + 0xD1C1: 2268, + 0xD1C2: 2269, + 0xD1C5: 2270, + 0xD1C9: 2271, + 0xD1D5: 2272, + 0xD1D7: 2273, + 0xD1E1: 2274, + 0xD1E2: 2275, + 0xD1E5: 2276, + 0xD1F5: 2277, + 0xD1F7: 2278, + 0xD241: 2279, + 0xD242: 2280, + 0xD245: 2281, + 0xD249: 2282, + 0xD253: 2283, + 0xD255: 2284, + 0xD257: 2285, + 0xD261: 2286, + 0xD265: 2287, + 0xD269: 2288, + 0xD273: 2289, + 0xD275: 2290, + 0xD281: 2291, + 0xD282: 2292, + 0xD285: 2293, + 0xD289: 2294, + 0xD28E: 2295, + 0xD291: 2296, + 0xD295: 2297, + 0xD297: 2298, + 0xD2A1: 2299, + 0xD2A5: 2300, + 0xD2A9: 2301, + 0xD2B1: 2302, + 0xD2B7: 2303, + 0xD2C1: 2304, + 0xD2C2: 2305, + 0xD2C5: 2306, + 0xD2C9: 2307, + 0xD2D7: 2308, + 0xD2E1: 2309, + 0xD2E2: 2310, + 0xD2E5: 2311, + 0xD2E9: 2312, + 0xD2F1: 2313, + 0xD2F3: 2314, + 0xD2F5: 2315, + 0xD2F7: 2316, + 0xD341: 2317, + 0xD342: 2318, + 0xD345: 2319, + 0xD349: 2320, + 0xD351: 2321, + 0xD355: 2322, + 0xD357: 2323, + 0xD361: 2324, + 0xD362: 2325, + 0xD365: 2326, + 0xD367: 2327, + 0xD368: 2328, + 0xD369: 2329, + 0xD36A: 2330, + 0xD371: 2331, + 0xD373: 2332, + 0xD375: 2333, + 0xD377: 2334, + 0xD37B: 2335, + 0xD381: 2336, + 0xD385: 2337, + 0xD389: 2338, + 0xD391: 2339, + 0xD393: 2340, + 0xD397: 2341, + 0xD3A1: 2342, + 0xD3A2: 2343, + 0xD3A5: 2344, + 0xD3A9: 2345, + 0xD3B1: 2346, + 0xD3B3: 2347, + 0xD3B5: 2348, + 0xD3B7: 2349, +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py new file mode 100644 index 0000000..d7364ba --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/johabprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
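The table above completes johabfreq.py: each two-byte Johab code point is keyed to the frequency-order index that the EUC-KR frequency data already defines, so the Johab analyser reuses that data instead of shipping its own ("The frequency data itself is the same as euc-kr," per the file's header comment). A minimal sketch of the lookup, assuming the vendored module path from this diff (the helper name johab_order is hypothetical; the byte packing mirrors how the distribution analysis combines lead and trail bytes into one code point):

from pip._vendor.chardet.johabfreq import JOHAB_TO_EUCKR_ORDER_TABLE

def johab_order(lead: int, trail: int) -> int:
    # Pack the lead and trail bytes into a single 16-bit code point
    # (e.g. 0x88, 0x61 -> 0x8861) and return its EUC-KR frequency order;
    # -1 means the pair is not a known Johab syllable.
    return JOHAB_TO_EUCKR_ORDER_TABLE.get((lead << 8) | trail, -1)

print(johab_order(0x88, 0x61))  # -> 0, the first entry in the table above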
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import JOHABDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import JOHAB_SM_MODEL + + +class JOHABProber(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(JOHAB_SM_MODEL) + self.distribution_analyzer = JOHABDistributionAnalysis() + self.reset() + + @property + def charset_name(self) -> str: + return "Johab" + + @property + def language(self) -> str: + return "Korean" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py new file mode 100644 index 0000000..2f53bdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/jpcntx.py @@ -0,0 +1,238 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
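The johabprober.py file just added follows chardet's standard multi-byte prober shape: a CodingStateMachine driven by JOHAB_SM_MODEL validates byte sequences, JOHABDistributionAnalysis scores syllable frequencies against the mapping table above, and the MultiByteCharSetProber base class folds both signals into a confidence value. A hedged usage sketch, assuming the vendored path from this diff (feed() and get_confidence() are inherited from the base classes; normal callers go through chardet.detect() rather than driving a single prober):

from pip._vendor.chardet.johabprober import JOHABProber

prober = JOHABProber()
sample = b"\x88\x61\x88\x62"    # arbitrary Johab-range byte pairs, purely illustrative
prober.feed(sample)             # consume raw bytes suspected to be Johab-encoded Korean
print(prober.charset_name)      # "Johab"
print(prober.language)          # "Korean"
print(prober.get_confidence())  # float in [0.0, 1.0]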
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Tuple, Union + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +# fmt: off +jp2_char_context = ( + (0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1), + (2, 4, 0, 4, 0, 3, 0, 4, 0, 3, 4, 4, 4, 2, 4, 3, 3, 4, 3, 2, 3, 3, 4, 2, 3, 3, 3, 2, 4, 1, 4, 3, 3, 1, 5, 4, 3, 4, 3, 4, 3, 5, 3, 0, 3, 5, 4, 2, 0, 3, 1, 0, 3, 3, 0, 3, 3, 0, 1, 1, 0, 4, 3, 0, 3, 3, 0, 4, 0, 2, 0, 3, 5, 5, 5, 5, 4, 0, 4, 1, 0, 3, 4), + (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2), + (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 4, 4, 3, 5, 3, 5, 1, 5, 3, 4, 3, 4, 4, 3, 4, 3, 3, 4, 3, 5, 4, 4, 3, 5, 5, 3, 5, 5, 5, 3, 5, 5, 3, 4, 5, 5, 3, 1, 3, 2, 0, 3, 4, 0, 4, 2, 0, 4, 2, 1, 5, 3, 2, 3, 5, 0, 4, 0, 2, 0, 5, 4, 4, 5, 4, 5, 0, 4, 0, 0, 4, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 5, 4, 3, 3, 3, 3, 4, 3, 5, 4, 4, 3, 5, 4, 4, 3, 4, 3, 4, 4, 4, 4, 5, 3, 4, 4, 3, 4, 5, 5, 4, 5, 5, 1, 4, 5, 4, 3, 0, 3, 3, 1, 3, 3, 0, 4, 4, 0, 3, 3, 1, 5, 3, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 0, 4, 1, 1, 3, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 4, 0, 3, 0, 3, 0, 4, 0, 3, 4, 4, 3, 2, 2, 1, 2, 1, 3, 1, 3, 3, 3, 3, 3, 4, 3, 1, 3, 3, 5, 3, 3, 0, 4, 3, 0, 5, 4, 3, 3, 5, 4, 4, 3, 4, 4, 5, 0, 1, 2, 0, 1, 2, 0, 2, 2, 0, 1, 0, 0, 5, 2, 2, 1, 4, 0, 3, 0, 1, 0, 4, 4, 3, 5, 4, 3, 0, 2, 1, 0, 4, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 5, 0, 4, 0, 2, 1, 4, 4, 2, 4, 1, 4, 2, 4, 2, 4, 3, 3, 3, 4, 3, 3, 3, 3, 1, 4, 2, 3, 3, 3, 1, 4, 4, 1, 1, 1, 4, 3, 3, 2, 0, 2, 4, 3, 2, 0, 3, 3, 0, 3, 1, 1, 0, 0, 0, 3, 3, 0, 4, 2, 2, 3, 4, 0, 4, 0, 3, 0, 4, 4, 5, 3, 4, 4, 0, 3, 0, 0, 1, 4), + (1, 4, 0, 4, 0, 4, 0, 4, 0, 3, 5, 4, 4, 3, 4, 3, 5, 4, 3, 3, 4, 3, 5, 4, 4, 4, 4, 3, 4, 2, 4, 3, 3, 1, 5, 4, 3, 2, 4, 5, 4, 5, 5, 4, 4, 5, 4, 4, 0, 3, 2, 2, 3, 3, 0, 4, 3, 1, 3, 2, 1, 4, 3, 3, 4, 5, 0, 3, 0, 2, 0, 4, 5, 5, 4, 5, 4, 0, 4, 0, 0, 5, 4), + (0, 5, 0, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 3, 4, 0, 4, 4, 4, 3, 4, 3, 4, 3, 3, 1, 4, 2, 4, 3, 4, 0, 5, 4, 1, 4, 5, 4, 4, 5, 3, 2, 4, 3, 4, 3, 2, 4, 1, 3, 3, 3, 2, 3, 2, 0, 4, 3, 3, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 4, 3, 0, 4, 1, 0, 1, 3), + (0, 3, 1, 4, 0, 3, 0, 2, 0, 3, 4, 4, 3, 1, 4, 
2, 3, 3, 4, 3, 4, 3, 4, 3, 4, 4, 3, 2, 3, 1, 5, 4, 4, 1, 4, 4, 3, 5, 4, 4, 3, 5, 5, 4, 3, 4, 4, 3, 1, 2, 3, 1, 2, 2, 0, 3, 2, 0, 3, 1, 0, 5, 3, 3, 3, 4, 3, 3, 3, 3, 4, 4, 4, 4, 5, 4, 2, 0, 3, 3, 2, 4, 3), + (0, 2, 0, 3, 0, 1, 0, 1, 0, 0, 3, 2, 0, 0, 2, 0, 1, 0, 2, 1, 3, 3, 3, 1, 2, 3, 1, 0, 1, 0, 4, 2, 1, 1, 3, 3, 0, 4, 3, 3, 1, 4, 3, 3, 0, 3, 3, 2, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 4, 1, 0, 2, 3, 2, 2, 2, 1, 3, 3, 3, 4, 4, 3, 2, 0, 3, 1, 0, 3, 3), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 4, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 4, 2, 4, 3, 4, 3, 3, 2, 4, 3, 4, 5, 4, 1, 4, 5, 3, 5, 4, 5, 3, 5, 4, 0, 3, 5, 5, 3, 1, 3, 3, 2, 2, 3, 0, 3, 4, 1, 3, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 5, 3, 0, 4, 1, 0, 3, 4), + (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 2, 2, 1, 0, 1, 0, 0, 0, 3, 0, 3, 0, 3, 0, 1, 3, 1, 0, 3, 1, 3, 3, 3, 1, 3, 3, 3, 0, 1, 3, 1, 3, 4, 0, 0, 3, 1, 1, 0, 3, 2, 0, 0, 0, 0, 1, 3, 0, 1, 0, 0, 3, 3, 2, 0, 3, 0, 0, 0, 0, 0, 3, 4, 3, 4, 3, 3, 0, 3, 0, 0, 2, 3), + (2, 3, 0, 3, 0, 2, 0, 1, 0, 3, 3, 4, 3, 1, 3, 1, 1, 1, 3, 1, 4, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 4, 3, 1, 4, 3, 2, 5, 5, 4, 4, 4, 4, 3, 3, 4, 4, 4, 0, 2, 1, 1, 3, 2, 0, 1, 2, 0, 0, 1, 0, 4, 1, 3, 3, 3, 0, 3, 0, 1, 0, 4, 4, 4, 5, 5, 3, 0, 2, 0, 0, 4, 4), + (0, 2, 0, 1, 0, 3, 1, 3, 0, 2, 3, 3, 3, 0, 3, 1, 0, 0, 3, 0, 3, 2, 3, 1, 3, 2, 1, 1, 0, 0, 4, 2, 1, 0, 2, 3, 1, 4, 3, 2, 0, 4, 4, 3, 1, 3, 1, 3, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 1, 1, 1, 2, 0, 3, 0, 0, 0, 3, 4, 2, 4, 3, 2, 0, 1, 0, 0, 3, 3), + (0, 1, 0, 4, 0, 5, 0, 4, 0, 2, 4, 4, 2, 3, 3, 2, 3, 3, 5, 3, 3, 3, 4, 3, 4, 2, 3, 0, 4, 3, 3, 3, 4, 1, 4, 3, 2, 1, 5, 5, 3, 4, 5, 1, 3, 5, 4, 2, 0, 3, 3, 0, 1, 3, 0, 4, 2, 0, 1, 3, 1, 4, 3, 3, 3, 3, 0, 3, 0, 1, 0, 3, 4, 4, 4, 5, 5, 0, 3, 0, 1, 4, 5), + (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 3, 1, 3, 0, 4, 0, 1, 1, 3, 0, 3, 4, 3, 2, 3, 1, 0, 3, 3, 2, 3, 1, 3, 0, 2, 3, 0, 2, 1, 4, 1, 2, 2, 0, 0, 3, 3, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 3, 2, 1, 3, 3, 0, 2, 0, 2, 0, 0, 3, 3, 1, 2, 4, 0, 3, 0, 2, 2, 3), + (2, 4, 0, 5, 0, 4, 0, 4, 0, 2, 4, 4, 4, 3, 4, 3, 3, 3, 1, 2, 4, 3, 4, 3, 4, 4, 5, 0, 3, 3, 3, 3, 2, 0, 4, 3, 1, 4, 3, 4, 1, 4, 4, 3, 3, 4, 4, 3, 1, 2, 3, 0, 4, 2, 0, 4, 1, 0, 3, 3, 0, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 3, 5, 3, 4, 5, 2, 0, 3, 0, 0, 4, 5), + (0, 3, 0, 4, 0, 1, 0, 1, 0, 1, 3, 2, 2, 1, 3, 0, 3, 0, 2, 0, 2, 0, 3, 0, 2, 0, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1, 0, 0, 0, 4, 0, 3, 1, 0, 2, 1, 3, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 2, 2, 3, 1, 0, 3, 0, 0, 0, 1, 4, 4, 4, 3, 0, 0, 4, 0, 0, 1, 4), + (1, 4, 1, 5, 0, 3, 0, 3, 0, 4, 5, 4, 4, 3, 5, 3, 3, 4, 4, 3, 4, 1, 3, 3, 3, 3, 2, 1, 4, 1, 5, 4, 3, 1, 4, 4, 3, 5, 4, 4, 3, 5, 4, 3, 3, 4, 4, 4, 0, 3, 3, 1, 2, 3, 0, 3, 1, 0, 3, 3, 0, 5, 4, 4, 4, 4, 4, 4, 3, 3, 5, 4, 4, 3, 3, 5, 4, 0, 3, 2, 0, 4, 4), + (0, 2, 0, 3, 0, 1, 0, 0, 0, 1, 3, 3, 3, 2, 4, 1, 3, 0, 3, 1, 3, 0, 2, 2, 1, 1, 0, 0, 2, 0, 4, 3, 1, 0, 4, 3, 0, 4, 4, 4, 1, 4, 3, 1, 1, 3, 3, 1, 0, 2, 0, 0, 1, 3, 0, 0, 0, 0, 2, 0, 0, 4, 3, 2, 4, 3, 5, 4, 3, 3, 3, 4, 3, 3, 4, 3, 3, 0, 2, 1, 0, 3, 3), + (0, 2, 0, 4, 0, 3, 0, 2, 0, 2, 5, 5, 3, 4, 4, 4, 4, 1, 4, 3, 3, 0, 4, 3, 4, 3, 1, 3, 3, 2, 4, 3, 0, 3, 4, 3, 0, 3, 4, 4, 2, 4, 4, 0, 4, 5, 3, 3, 2, 2, 1, 1, 1, 2, 0, 1, 5, 0, 3, 3, 2, 4, 3, 3, 3, 4, 0, 3, 0, 2, 0, 4, 4, 3, 5, 5, 0, 0, 3, 0, 2, 3, 3), + (0, 3, 0, 4, 0, 3, 0, 1, 0, 3, 4, 3, 3, 1, 3, 3, 3, 0, 3, 1, 3, 0, 4, 3, 3, 1, 1, 0, 3, 0, 3, 3, 0, 0, 4, 4, 0, 1, 5, 4, 3, 3, 5, 0, 3, 3, 4, 3, 0, 2, 0, 1, 1, 1, 0, 1, 3, 0, 1, 2, 1, 3, 3, 2, 3, 3, 0, 3, 0, 1, 0, 1, 3, 3, 4, 4, 1, 0, 1, 2, 2, 1, 3), + (0, 1, 0, 4, 0, 4, 0, 3, 0, 1, 3, 3, 3, 2, 3, 1, 1, 0, 3, 
0, 3, 3, 4, 3, 2, 4, 2, 0, 1, 0, 4, 3, 2, 0, 4, 3, 0, 5, 3, 3, 2, 4, 4, 4, 3, 3, 3, 4, 0, 1, 3, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 4, 2, 3, 3, 3, 0, 3, 0, 0, 0, 4, 4, 4, 5, 3, 2, 0, 3, 3, 0, 3, 5), + (0, 2, 0, 3, 0, 0, 0, 3, 0, 1, 3, 0, 2, 0, 0, 0, 1, 0, 3, 1, 1, 3, 3, 0, 0, 3, 0, 0, 3, 0, 2, 3, 1, 0, 3, 1, 0, 3, 3, 2, 0, 4, 2, 2, 0, 2, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 0, 0, 0, 1, 3, 1, 2, 0, 0, 0, 1, 0, 0, 1, 4), + (0, 3, 0, 3, 0, 5, 0, 1, 0, 2, 4, 3, 1, 3, 3, 2, 1, 1, 5, 2, 1, 0, 5, 1, 2, 0, 0, 0, 3, 3, 2, 2, 3, 2, 4, 3, 0, 0, 3, 3, 1, 3, 3, 0, 2, 5, 3, 4, 0, 3, 3, 0, 1, 2, 0, 2, 2, 0, 3, 2, 0, 2, 2, 3, 3, 3, 0, 2, 0, 1, 0, 3, 4, 4, 2, 5, 4, 0, 3, 0, 0, 3, 5), + (0, 3, 0, 3, 0, 3, 0, 1, 0, 3, 3, 3, 3, 0, 3, 0, 2, 0, 2, 1, 1, 0, 2, 0, 1, 0, 0, 0, 2, 1, 0, 0, 1, 0, 3, 2, 0, 0, 3, 3, 1, 2, 3, 1, 0, 3, 3, 0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 3, 1, 2, 3, 0, 3, 0, 1, 0, 3, 2, 1, 0, 4, 3, 0, 1, 1, 0, 3, 3), + (0, 4, 0, 5, 0, 3, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 4, 3, 5, 3, 3, 2, 5, 3, 4, 4, 4, 3, 4, 3, 4, 5, 5, 3, 4, 4, 3, 4, 4, 5, 4, 4, 4, 3, 4, 5, 5, 4, 2, 3, 4, 2, 3, 4, 0, 3, 3, 1, 4, 3, 2, 4, 3, 3, 5, 5, 0, 3, 0, 3, 0, 5, 5, 5, 5, 4, 4, 0, 4, 0, 1, 4, 4), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 5, 4, 4, 2, 3, 2, 5, 1, 3, 2, 5, 1, 4, 2, 3, 2, 3, 3, 4, 3, 3, 3, 3, 2, 5, 4, 1, 3, 3, 5, 3, 4, 4, 0, 4, 4, 3, 1, 1, 3, 1, 0, 2, 3, 0, 2, 3, 0, 3, 0, 0, 4, 3, 1, 3, 4, 0, 3, 0, 2, 0, 4, 4, 4, 3, 4, 5, 0, 4, 0, 0, 3, 4), + (0, 3, 0, 3, 0, 3, 1, 2, 0, 3, 4, 4, 3, 3, 3, 0, 2, 2, 4, 3, 3, 1, 3, 3, 3, 1, 1, 0, 3, 1, 4, 3, 2, 3, 4, 4, 2, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 3, 1, 3, 3, 1, 3, 3, 0, 4, 1, 0, 2, 2, 1, 4, 3, 2, 3, 3, 5, 4, 3, 3, 5, 4, 4, 3, 3, 0, 4, 0, 3, 2, 2, 4, 4), + (0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 3, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 0, 0, 1, 0, 0, 0, 0, 3, 0, 0, 1, 0, 1, 1, 3, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 0, 3, 4, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1), + (0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 4, 1, 4, 0, 3, 0, 4, 0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 1, 5, 1, 4, 0, 0, 3, 0, 5, 0, 5, 2, 0, 1, 0, 0, 0, 2, 1, 4, 0, 1, 3, 0, 0, 3, 0, 0, 3, 1, 1, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0), + (1, 4, 0, 5, 0, 3, 0, 2, 0, 3, 5, 4, 4, 3, 4, 3, 5, 3, 4, 3, 3, 0, 4, 3, 3, 3, 3, 3, 3, 2, 4, 4, 3, 1, 3, 4, 4, 5, 4, 4, 3, 4, 4, 1, 3, 5, 4, 3, 3, 3, 1, 2, 2, 3, 3, 1, 3, 1, 3, 3, 3, 5, 3, 3, 4, 5, 0, 3, 0, 3, 0, 3, 4, 3, 4, 4, 3, 0, 3, 0, 2, 4, 3), + (0, 1, 0, 4, 0, 0, 0, 0, 0, 1, 4, 0, 4, 1, 4, 2, 4, 0, 3, 0, 1, 0, 1, 0, 0, 0, 0, 0, 2, 0, 3, 1, 1, 1, 0, 3, 0, 0, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 3, 0, 0, 0, 0, 3, 2, 0, 2, 2, 0, 1, 0, 0, 0, 2, 3, 2, 3, 3, 0, 0, 0, 0, 2, 1, 0), + (0, 5, 1, 5, 0, 3, 0, 3, 0, 5, 4, 4, 5, 1, 5, 3, 3, 0, 4, 3, 4, 3, 5, 3, 4, 3, 3, 2, 4, 3, 4, 3, 3, 0, 3, 3, 1, 4, 4, 3, 4, 4, 4, 3, 4, 5, 5, 3, 2, 3, 1, 1, 3, 3, 1, 3, 1, 1, 3, 3, 2, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 5, 3, 3, 0, 3, 4, 0, 4, 3), + (0, 5, 0, 5, 0, 3, 0, 2, 0, 4, 4, 3, 5, 2, 4, 3, 3, 3, 4, 4, 4, 3, 5, 3, 5, 3, 3, 1, 4, 0, 4, 3, 3, 0, 3, 3, 0, 4, 4, 4, 4, 5, 4, 3, 3, 5, 5, 3, 2, 3, 1, 2, 3, 2, 0, 1, 0, 0, 3, 2, 2, 4, 4, 3, 1, 5, 0, 4, 0, 3, 0, 4, 3, 1, 3, 2, 1, 0, 3, 3, 0, 3, 3), + (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 5, 5, 3, 4, 3, 3, 2, 5, 4, 4, 3, 5, 3, 5, 3, 4, 0, 4, 3, 4, 4, 3, 2, 4, 4, 3, 4, 5, 4, 4, 5, 5, 0, 3, 5, 5, 4, 1, 3, 3, 2, 3, 3, 1, 3, 1, 0, 4, 3, 1, 4, 4, 3, 4, 5, 0, 4, 0, 2, 0, 4, 3, 4, 4, 3, 3, 0, 4, 0, 0, 5, 5), + (0, 4, 0, 4, 0, 5, 0, 1, 1, 3, 3, 4, 4, 3, 4, 1, 3, 0, 5, 1, 3, 0, 3, 
1, 3, 1, 1, 0, 3, 0, 3, 3, 4, 0, 4, 3, 0, 4, 4, 4, 3, 4, 4, 0, 3, 5, 4, 1, 0, 3, 0, 0, 2, 3, 0, 3, 1, 0, 3, 1, 0, 3, 2, 1, 3, 5, 0, 3, 0, 1, 0, 3, 2, 3, 3, 4, 4, 0, 2, 2, 0, 4, 4), + (2, 4, 0, 5, 0, 4, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 5, 3, 5, 3, 5, 2, 5, 3, 4, 3, 3, 4, 3, 4, 5, 3, 2, 1, 5, 4, 3, 2, 3, 4, 5, 3, 4, 1, 2, 5, 4, 3, 0, 3, 3, 0, 3, 2, 0, 2, 3, 0, 4, 1, 0, 3, 4, 3, 3, 5, 0, 3, 0, 1, 0, 4, 5, 5, 5, 4, 3, 0, 4, 2, 0, 3, 5), + (0, 5, 0, 4, 0, 4, 0, 2, 0, 5, 4, 3, 4, 3, 4, 3, 3, 3, 4, 3, 4, 2, 5, 3, 5, 3, 4, 1, 4, 3, 4, 4, 4, 0, 3, 5, 0, 4, 4, 4, 4, 5, 3, 1, 3, 4, 5, 3, 3, 3, 3, 3, 3, 3, 0, 2, 2, 0, 3, 3, 2, 4, 3, 3, 3, 5, 3, 4, 1, 3, 3, 5, 3, 2, 0, 0, 0, 0, 4, 3, 1, 3, 3), + (0, 1, 0, 3, 0, 3, 0, 1, 0, 1, 3, 3, 3, 2, 3, 3, 3, 0, 3, 0, 0, 0, 3, 1, 3, 0, 0, 0, 2, 2, 2, 3, 0, 0, 3, 2, 0, 1, 2, 4, 1, 3, 3, 0, 0, 3, 3, 3, 0, 1, 0, 0, 2, 1, 0, 0, 3, 0, 3, 1, 0, 3, 0, 0, 1, 3, 0, 2, 0, 1, 0, 3, 3, 1, 3, 3, 0, 0, 1, 1, 0, 3, 3), + (0, 2, 0, 3, 0, 2, 1, 4, 0, 2, 2, 3, 1, 1, 3, 1, 1, 0, 2, 0, 3, 1, 2, 3, 1, 3, 0, 0, 1, 0, 4, 3, 2, 3, 3, 3, 1, 4, 2, 3, 3, 3, 3, 1, 0, 3, 1, 4, 0, 1, 1, 0, 1, 2, 0, 1, 1, 0, 1, 1, 0, 3, 1, 3, 2, 2, 0, 1, 0, 0, 0, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 2, 3), + (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 5, 5, 3, 3, 4, 3, 3, 1, 5, 4, 4, 2, 4, 4, 4, 3, 4, 2, 4, 3, 5, 5, 4, 3, 3, 4, 3, 3, 5, 5, 4, 5, 5, 1, 3, 4, 5, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 1, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 5, 3, 3, 1, 4, 3, 0, 4, 0, 1, 5, 3), + (0, 5, 0, 5, 0, 4, 0, 2, 0, 4, 4, 3, 4, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4, 5, 3, 3, 5, 2, 4, 4, 4, 3, 4, 4, 3, 3, 4, 4, 5, 5, 3, 3, 4, 3, 4, 3, 3, 4, 3, 3, 3, 3, 1, 2, 2, 1, 4, 3, 3, 5, 4, 4, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 4, 4, 1, 0, 4, 2, 0, 2, 4), + (0, 4, 0, 4, 0, 3, 0, 1, 0, 3, 5, 2, 3, 0, 3, 0, 2, 1, 4, 2, 3, 3, 4, 1, 4, 3, 3, 2, 4, 1, 3, 3, 3, 0, 3, 3, 0, 0, 3, 3, 3, 5, 3, 3, 3, 3, 3, 2, 0, 2, 0, 0, 2, 0, 0, 2, 0, 0, 1, 0, 0, 3, 1, 2, 2, 3, 0, 3, 0, 2, 0, 4, 4, 3, 3, 4, 1, 0, 3, 0, 0, 2, 4), + (0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 3, 1, 3, 0, 3, 2, 0, 0, 0, 1, 0, 3, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 2, 0, 0, 0, 0, 0, 0, 2), + (0, 2, 1, 3, 0, 2, 0, 2, 0, 3, 3, 3, 3, 1, 3, 1, 3, 3, 3, 3, 3, 3, 4, 2, 2, 1, 2, 1, 4, 0, 4, 3, 1, 3, 3, 3, 2, 4, 3, 5, 4, 3, 3, 3, 3, 3, 3, 3, 0, 1, 3, 0, 2, 0, 0, 1, 0, 0, 1, 0, 0, 4, 2, 0, 2, 3, 0, 3, 3, 0, 3, 3, 4, 2, 3, 1, 4, 0, 1, 2, 0, 2, 3), + (0, 3, 0, 3, 0, 1, 0, 3, 0, 2, 3, 3, 3, 0, 3, 1, 2, 0, 3, 3, 2, 3, 3, 2, 3, 2, 3, 1, 3, 0, 4, 3, 2, 0, 3, 3, 1, 4, 3, 3, 2, 3, 4, 3, 1, 3, 3, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 4, 1, 1, 0, 3, 0, 3, 1, 0, 2, 3, 3, 3, 3, 3, 1, 0, 0, 2, 0, 3, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3), + (0, 2, 0, 3, 1, 3, 0, 3, 0, 2, 3, 3, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 1, 3, 0, 2, 3, 1, 1, 4, 3, 3, 2, 3, 3, 1, 2, 2, 4, 1, 3, 3, 0, 1, 4, 2, 3, 0, 1, 3, 0, 3, 0, 0, 1, 3, 0, 2, 0, 0, 3, 3, 2, 1, 3, 0, 3, 0, 2, 0, 3, 4, 4, 4, 3, 1, 0, 3, 0, 0, 3, 3), + (0, 2, 0, 1, 0, 2, 0, 0, 0, 1, 3, 2, 2, 1, 3, 0, 1, 1, 3, 0, 3, 2, 3, 1, 2, 0, 2, 0, 1, 1, 3, 3, 3, 0, 3, 3, 1, 1, 2, 3, 2, 3, 3, 1, 2, 3, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 2, 1, 2, 1, 3, 0, 3, 0, 0, 0, 3, 4, 4, 4, 3, 2, 0, 2, 0, 0, 2, 4), + (0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 3), + (0, 3, 0, 3, 0, 2, 0, 3, 0, 3, 3, 3, 2, 3, 2, 2, 2, 0, 3, 1, 3, 3, 3, 2, 3, 3, 0, 0, 3, 0, 3, 2, 2, 0, 2, 3, 1, 4, 3, 4, 3, 3, 2, 3, 1, 5, 4, 4, 0, 3, 1, 2, 1, 3, 0, 3, 1, 1, 2, 0, 2, 3, 1, 3, 1, 3, 0, 3, 0, 1, 0, 3, 3, 4, 4, 2, 1, 0, 2, 1, 0, 2, 4), + (0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 4, 2, 5, 1, 4, 0, 2, 0, 2, 1, 3, 1, 4, 0, 2, 1, 0, 0, 2, 1, 4, 1, 1, 0, 3, 3, 0, 5, 1, 3, 2, 3, 3, 1, 0, 3, 2, 3, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 1, 0, 3, 0, 2, 0, 1, 0, 3, 3, 3, 4, 3, 3, 0, 0, 0, 0, 2, 3), + (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 3), + (0, 1, 0, 3, 0, 4, 0, 3, 0, 2, 4, 3, 1, 0, 3, 2, 2, 1, 3, 1, 2, 2, 3, 1, 1, 1, 2, 1, 3, 0, 1, 2, 0, 1, 3, 2, 1, 3, 0, 5, 5, 1, 0, 0, 1, 3, 2, 1, 0, 3, 0, 0, 1, 0, 0, 0, 0, 0, 3, 4, 0, 1, 1, 1, 3, 2, 0, 2, 0, 1, 0, 2, 3, 3, 1, 2, 3, 0, 1, 0, 1, 0, 4), + (0, 0, 0, 1, 0, 3, 0, 3, 0, 2, 2, 1, 0, 0, 4, 0, 3, 0, 3, 1, 3, 0, 3, 0, 3, 0, 1, 0, 3, 0, 3, 1, 3, 0, 3, 3, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 2, 0, 0, 2, 0, 0, 0, 0, 2, 3, 3, 3, 3, 0, 0, 0, 0, 1, 4), + (0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 3, 1, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 0, 2, 0, 2, 3, 0, 0, 2, 2, 3, 1, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 2, 0, 0, 0, 0, 2, 3), + (2, 4, 0, 5, 0, 5, 0, 4, 0, 3, 4, 3, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 4, 5, 5, 5, 2, 3, 0, 5, 5, 4, 1, 5, 4, 3, 1, 5, 4, 3, 4, 4, 3, 3, 4, 3, 3, 0, 3, 2, 0, 2, 3, 0, 3, 0, 0, 3, 3, 0, 5, 3, 2, 3, 3, 0, 3, 0, 3, 0, 3, 4, 5, 4, 5, 3, 0, 4, 3, 0, 3, 4), + (0, 3, 0, 3, 0, 3, 0, 3, 0, 3, 3, 4, 3, 2, 3, 2, 3, 0, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 2, 4, 3, 3, 1, 3, 4, 3, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 3, 1, 0, 5, 3, 2, 1, 3, 0, 3, 0, 1, 2, 4, 3, 2, 4, 3, 3, 0, 3, 2, 0, 4, 4), + (0, 3, 0, 3, 0, 1, 0, 0, 0, 1, 4, 3, 3, 2, 3, 1, 3, 1, 4, 2, 3, 2, 4, 2, 3, 4, 3, 0, 2, 2, 3, 3, 3, 0, 3, 3, 3, 0, 3, 4, 1, 3, 3, 0, 3, 4, 3, 3, 0, 1, 1, 0, 1, 0, 0, 0, 4, 0, 3, 0, 0, 3, 1, 2, 1, 3, 0, 4, 0, 1, 0, 4, 3, 3, 4, 3, 3, 0, 2, 0, 0, 3, 3), + (0, 3, 0, 4, 0, 1, 0, 3, 0, 3, 4, 3, 3, 0, 3, 3, 3, 1, 3, 1, 3, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 3, 3, 1, 3, 3, 2, 5, 4, 3, 3, 4, 5, 3, 2, 5, 3, 4, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 1, 1, 0, 4, 2, 2, 1, 3, 0, 3, 0, 2, 0, 4, 4, 3, 5, 3, 2, 0, 1, 1, 0, 3, 4), + (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 4, 3, 3, 2, 3, 3, 3, 1, 4, 3, 4, 1, 5, 3, 4, 3, 4, 0, 4, 2, 4, 3, 4, 1, 5, 4, 0, 4, 4, 4, 4, 5, 4, 1, 3, 5, 4, 2, 1, 4, 1, 1, 3, 2, 0, 3, 1, 0, 3, 2, 1, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 3, 3, 3, 0, 4, 2, 0, 3, 4), + (1, 4, 0, 4, 0, 3, 0, 1, 0, 3, 3, 3, 1, 1, 3, 3, 2, 2, 3, 3, 1, 0, 3, 2, 2, 1, 2, 0, 3, 1, 2, 1, 2, 0, 3, 2, 0, 2, 2, 3, 3, 4, 3, 0, 3, 3, 1, 2, 0, 1, 1, 3, 1, 2, 0, 0, 3, 0, 1, 1, 0, 3, 2, 2, 3, 3, 0, 3, 0, 0, 0, 2, 3, 3, 4, 3, 3, 0, 1, 0, 0, 1, 4), + (0, 4, 0, 4, 0, 4, 0, 0, 0, 3, 4, 4, 3, 1, 4, 2, 3, 2, 3, 3, 3, 1, 4, 3, 4, 0, 3, 0, 4, 2, 3, 3, 2, 2, 5, 4, 2, 1, 3, 4, 3, 4, 3, 1, 3, 3, 4, 2, 0, 2, 1, 0, 3, 3, 0, 0, 2, 0, 3, 1, 0, 4, 4, 3, 4, 3, 0, 4, 0, 1, 0, 2, 4, 4, 4, 4, 4, 0, 3, 2, 0, 3, 3), + (0, 0, 0, 1, 0, 4, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 
0, 1, 0, 3, 2, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2), + (0, 2, 0, 3, 0, 4, 0, 4, 0, 1, 3, 3, 3, 0, 4, 0, 2, 1, 2, 1, 1, 1, 2, 0, 3, 1, 1, 0, 1, 0, 3, 1, 0, 0, 3, 3, 2, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 2, 2, 0, 3, 1, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 3, 0, 0, 0, 0, 1, 0, 0, 3, 3, 4, 3, 1, 0, 1, 0, 3, 0, 2), + (0, 0, 0, 3, 0, 5, 0, 0, 0, 0, 1, 0, 2, 0, 3, 1, 0, 1, 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 4, 0, 0, 0, 2, 3, 0, 1, 4, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0, 0, 0, 0, 0, 3), + (0, 2, 0, 5, 0, 5, 0, 1, 0, 2, 4, 3, 3, 2, 5, 1, 3, 2, 3, 3, 3, 0, 4, 1, 2, 0, 3, 0, 4, 0, 2, 2, 1, 1, 5, 3, 0, 0, 1, 4, 2, 3, 2, 0, 3, 3, 3, 2, 0, 2, 4, 1, 1, 2, 0, 1, 1, 0, 3, 1, 0, 1, 3, 1, 2, 3, 0, 2, 0, 0, 0, 1, 3, 5, 4, 4, 4, 0, 3, 0, 0, 1, 3), + (0, 4, 0, 5, 0, 4, 0, 4, 0, 4, 5, 4, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 3, 4, 5, 4, 2, 4, 2, 3, 4, 3, 1, 4, 4, 1, 3, 5, 4, 4, 5, 5, 4, 4, 5, 5, 5, 2, 3, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 4, 4, 4, 0, 3, 0, 4, 0, 3, 3, 4, 4, 5, 0, 0, 4, 3, 0, 4, 5), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 4, 4, 4, 3, 3, 2, 4, 3, 4, 3, 4, 3, 5, 3, 4, 3, 2, 1, 4, 2, 4, 4, 3, 1, 3, 4, 2, 4, 5, 5, 3, 4, 5, 4, 1, 5, 4, 3, 0, 3, 2, 2, 3, 2, 1, 3, 1, 0, 3, 3, 3, 5, 3, 3, 3, 5, 4, 4, 2, 3, 3, 4, 3, 3, 3, 2, 1, 0, 3, 2, 1, 4, 3), + (0, 4, 0, 5, 0, 4, 0, 3, 0, 3, 5, 5, 3, 2, 4, 3, 4, 0, 5, 4, 4, 1, 4, 4, 4, 3, 3, 3, 4, 3, 5, 5, 2, 3, 3, 4, 1, 2, 5, 5, 3, 5, 5, 2, 3, 5, 5, 4, 0, 3, 2, 0, 3, 3, 1, 1, 5, 1, 4, 1, 0, 4, 3, 2, 3, 5, 0, 4, 0, 3, 0, 5, 4, 3, 4, 3, 0, 0, 4, 1, 0, 4, 4), + (1, 3, 0, 4, 0, 2, 0, 2, 0, 2, 5, 5, 3, 3, 3, 3, 3, 0, 4, 2, 3, 4, 4, 4, 3, 4, 0, 0, 3, 4, 5, 4, 3, 3, 3, 3, 2, 5, 5, 4, 5, 5, 5, 4, 3, 5, 5, 5, 1, 3, 1, 0, 1, 0, 0, 3, 2, 0, 4, 2, 0, 5, 2, 3, 2, 4, 1, 3, 0, 3, 0, 4, 5, 4, 5, 4, 3, 0, 4, 2, 0, 5, 4), + (0, 3, 0, 4, 0, 5, 0, 3, 0, 3, 4, 4, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 4, 3, 3, 2, 2, 0, 3, 3, 3, 3, 3, 1, 3, 3, 3, 0, 4, 4, 3, 4, 4, 1, 1, 4, 4, 2, 0, 3, 1, 0, 1, 1, 0, 4, 1, 0, 2, 3, 1, 3, 3, 1, 3, 4, 0, 3, 0, 1, 0, 3, 1, 3, 0, 0, 1, 0, 2, 0, 0, 4, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 3, 0, 2, 0, 3, 0, 1, 5, 4, 3, 3, 3, 1, 4, 2, 1, 2, 3, 4, 4, 2, 4, 4, 5, 0, 3, 1, 4, 3, 4, 0, 4, 3, 3, 3, 2, 3, 2, 5, 3, 4, 3, 2, 2, 3, 0, 0, 3, 0, 2, 1, 0, 1, 2, 0, 0, 0, 0, 2, 1, 1, 3, 1, 0, 2, 0, 4, 0, 3, 4, 4, 4, 5, 2, 0, 2, 0, 0, 1, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 4, 2, 1, 1, 0, 1, 0, 3, 2, 0, 0, 3, 1, 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 4, 0, 4, 2, 1, 0, 0, 0, 0, 0, 1), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 2, 0, 2, 1, 0, 0, 1, 2, 1, 0, 1, 1, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2), + (0, 4, 0, 4, 0, 4, 0, 3, 0, 4, 4, 3, 4, 2, 4, 3, 2, 0, 4, 4, 4, 3, 5, 3, 5, 3, 3, 2, 4, 2, 4, 3, 4, 3, 1, 4, 0, 2, 3, 4, 4, 4, 3, 3, 3, 4, 4, 4, 3, 4, 1, 3, 4, 3, 2, 1, 2, 1, 3, 3, 3, 4, 4, 3, 3, 5, 0, 4, 0, 3, 0, 4, 3, 3, 3, 2, 1, 0, 3, 0, 0, 3, 3), + (0, 4, 0, 3, 0, 3, 0, 3, 0, 3, 5, 5, 3, 3, 3, 3, 4, 3, 4, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 4, 3, 5, 3, 3, 1, 
3, 2, 4, 5, 5, 5, 5, 4, 3, 4, 5, 5, 3, 2, 2, 3, 3, 3, 3, 2, 3, 3, 1, 2, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 4, 3, 2, 2, 1, 2, 0, 3, 0, 0, 4, 1),
+)
+# fmt: on
+
+
+class JapaneseContextAnalysis:
+    NUM_OF_CATEGORY = 6
+    DONT_KNOW = -1
+    ENOUGH_REL_THRESHOLD = 100
+    MAX_REL_THRESHOLD = 1000
+    MINIMUM_DATA_THRESHOLD = 4
+
+    def __init__(self) -> None:
+        self._total_rel = 0
+        self._rel_sample: List[int] = []
+        self._need_to_skip_char_num = 0
+        self._last_char_order = -1
+        self._done = False
+        self.reset()
+
+    def reset(self) -> None:
+        self._total_rel = 0  # total sequence received
+        # category counters, each integer counts sequence in its category
+        self._rel_sample = [0] * self.NUM_OF_CATEGORY
+        # if last byte in current buffer is not the last byte of a character,
+        # we need to know how many bytes to skip in next buffer
+        self._need_to_skip_char_num = 0
+        self._last_char_order = -1  # The order of previous char
+        # If this flag is set to True, detection is done and conclusion has
+        # been made
+        self._done = False
+
+    def feed(self, byte_str: Union[bytes, bytearray], num_bytes: int) -> None:
+        if self._done:
+            return
+
+        # The buffer we get is byte oriented, and a character may span more
+        # than one buffer. If the last one or two bytes of the previous buffer
+        # did not complete a character, we record how many bytes are needed to
+        # complete it and skip them here. We could instead keep those bytes and
+        # analyse the character once it is complete, but a single character
+        # makes little difference, so simply skipping it keeps the logic simple
+        # and improves performance.
+        i = self._need_to_skip_char_num
+        while i < num_bytes:
+            order, char_len = self.get_order(byte_str[i : i + 2])
+            i += char_len
+            if i > num_bytes:
+                self._need_to_skip_char_num = i - num_bytes
+                self._last_char_order = -1
+            else:
+                if (order != -1) and (self._last_char_order != -1):
+                    self._total_rel += 1
+                    if self._total_rel > self.MAX_REL_THRESHOLD:
+                        self._done = True
+                        break
+                    self._rel_sample[
+                        jp2_char_context[self._last_char_order][order]
+                    ] += 1
+                self._last_char_order = order
+
+    def got_enough_data(self) -> bool:
+        return self._total_rel > self.ENOUGH_REL_THRESHOLD
+
+    def get_confidence(self) -> float:
+        # This is just one way to calculate confidence. It works well for me.
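+        # self._rel_sample[0] holds the count of character pairs that fell
+        # into category 0 of jp2_char_context (the least typical pairing), so
+        # the value computed below is the fraction of observed adjacent
+        # character pairs that landed in any other category.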
+        if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
+            return (self._total_rel - self._rel_sample[0]) / self._total_rel
+        return self.DONT_KNOW
+
+    def get_order(self, _: Union[bytes, bytearray]) -> Tuple[int, int]:
+        return -1, 1
+
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+    def __init__(self) -> None:
+        super().__init__()
+        self._charset_name = "SHIFT_JIS"
+
+    @property
+    def charset_name(self) -> str:
+        return self._charset_name
+
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
+        if not byte_str:
+            return -1, 1
+        # find out current char's byte length
+        first_char = byte_str[0]
+        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
+            char_len = 2
+            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+                self._charset_name = "CP932"
+        else:
+            char_len = 1
+
+        # return its order if it is hiragana
+        if len(byte_str) > 1:
+            second_char = byte_str[1]
+            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+                return second_char - 0x9F, char_len
+
+        return -1, char_len
+
+
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+    def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]:
+        if not byte_str:
+            return -1, 1
+        # find out current char's byte length
+        first_char = byte_str[0]
+        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+            char_len = 2
+        elif first_char == 0x8F:
+            char_len = 3
+        else:
+            char_len = 1
+
+        # return its order if it is hiragana
+        if len(byte_str) > 1:
+            second_char = byte_str[1]
+            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+                return second_char - 0xA1, char_len
+
+        return -1, char_len
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
new file mode 100644
index 0000000..9946682
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
@@ -0,0 +1,4649 @@
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+BULGARIAN_LANG_MODEL = { + 63: { # 'e' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 45: { # '\xad' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0,
# 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 31: { # 'А' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 2, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 0, # 'и' + 26: 2, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 32: { # 'Б' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 2, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 35: { # 'В' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 2, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 43: { # 'Г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 
33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 37: { # 'Д' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 2, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 44: { # 'Е' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 2, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 0, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 55: { # 'Ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 
52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 47: { # 'З' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 40: { # 'И' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 2, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 3, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 59: { # 'Й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 33: { # 'К' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 
2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 46: { # 'Л' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 38: { # 'М' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 36: { # 'Н' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 41: { # 'О' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 
2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 30: { # 'П' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 2, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 39: { # 'Р' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 28: { # 'С' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 3, # 'А' + 32: 2, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 34: { # 'Т' + 63: 0, # 'e' + 45: 0, # 
'\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 51: { # 'У' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 48: { # 'Ф' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 49: { # 'Х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 
2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 53: { # 'Ц' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 50: { # 'Ч' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 54: { # 'Ш' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 57: { # 'Щ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 
0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 61: { # 'Ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 60: { # 'Ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 2, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 56: { # 'Я' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 1: { # 'а' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' 
+ 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 18: { # 'б' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 3, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 9: { # 'в' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 20: { # 'г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' 
+ 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 11: { # 'д' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 3: { # 'е' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 23: { # 'ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 15: { # 'з' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 
23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 2: { # 'и' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 26: { # 'й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 12: { # 'к' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 10: { # 'л' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 
0, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 3, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 14: { # 'м' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 6: { # 'н' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 2, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 4: { # 'о' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 13: { # 'п' + 
63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 7: { # 'р' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 3, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 8: { # 'с' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 5: { # 'т' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 
'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 19: { # 'у' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 2, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 29: { # 'ф' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 2, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 25: { # 'х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 22: { # 'ц' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 
'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 21: { # 'ч' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 27: { # 'ш' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 24: { # 'щ' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 17: { # 'ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 
55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 52: { # 'ь' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 42: { # 'ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 16: { # 'я' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 1, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 3, # 'х' + 22: 2, # 'ц' + 
21: 1, # 'ч' + 27: 1, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 58: { # 'є' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 62: { # '№' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
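+# (For example, byte 0xD0 below maps to order 1, Cyrillic 'а', the most
+# frequent Bulgarian letter. Only these low orders appear as keys in
+# BULGARIAN_LANG_MODEL above; bytes mapped to higher orders, or to the
+# sentinel values 251-255 described above, fall outside the bigram model.)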
+ 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 194, # '\x80' + 129: 195, # '\x81' + 130: 196, # '\x82' + 131: 197, # '\x83' + 132: 198, # '\x84' + 133: 199, # '\x85' + 134: 200, # '\x86' + 135: 201, # '\x87' + 136: 202, # '\x88' + 137: 203, # '\x89' + 138: 204, # '\x8a' + 139: 205, # '\x8b' + 140: 206, # '\x8c' + 141: 207, # '\x8d' + 142: 208, # '\x8e' + 143: 209, # '\x8f' + 144: 210, # '\x90' + 145: 211, # '\x91' + 146: 212, # '\x92' + 147: 213, # '\x93' + 148: 214, # '\x94' + 149: 215, # '\x95' + 150: 216, # '\x96' + 151: 217, # '\x97' + 152: 218, # '\x98' + 153: 219, # '\x99' + 154: 220, # '\x9a' + 155: 221, # '\x9b' + 156: 222, # '\x9c' + 157: 223, # '\x9d' + 158: 224, # '\x9e' + 159: 225, # '\x9f' + 160: 81, # '\xa0' + 161: 226, # 'Ё' + 162: 227, # 'Ђ' + 163: 228, # 'Ѓ' + 164: 229, # 'Є' + 165: 230, # 'Ѕ' + 166: 105, # 'І' + 167: 231, # 'Ї' + 168: 232, # 'Ј' + 169: 233, # 'Љ' + 170: 234, # 'Њ' + 171: 235, # 'Ћ' + 172: 236, # 'Ќ' + 173: 45, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 31, # 'А' + 177: 32, # 'Б' + 178: 35, # 'В' + 179: 43, # 'Г' + 180: 37, # 'Д' + 181: 44, # 'Е' + 182: 55, # 'Ж' + 183: 47, # 'З' + 184: 40, # 'И' + 185: 59, # 'Й' + 186: 33, # 'К' + 187: 46, # 'Л' + 188: 38, # 'М' + 189: 36, # 'Н' + 190: 41, # 'О' + 191: 30, # 'П' + 192: 39, # 'Р' + 193: 28, # 'С' + 194: 34, # 'Т' + 195: 51, # 'У' + 196: 48, # 'Ф' + 197: 49, # 'Х' + 198: 53, # 'Ц' + 199: 50, # 'Ч' + 200: 54, # 'Ш' + 201: 57, # 'Щ' + 202: 61, # 'Ъ' + 203: 239, # 'Ы' + 204: 67, # 'Ь' + 205: 240, # 'Э' + 206: 60, # 'Ю' + 207: 56, # 'Я' + 208: 1, # 'а' + 209: 18, # 'б' + 210: 9, # 'в' + 211: 20, # 'г' + 212: 11, # 'д' + 213: 3, # 'е' + 214: 23, # 'ж' + 215: 15, # 'з' + 216: 2, # 'и' + 217: 26, # 'й' + 218: 12, # 'к' + 219: 10, # 'л' + 220: 14, # 'м' + 221: 6, # 'н' + 222: 4, # 'о' + 223: 13, # 'п' + 224: 7, # 'р' + 225: 8, # 'с' + 226: 5, # 'т' + 227: 19, # 'у' + 228: 29, # 'ф' + 229: 25, # 'х' + 230: 22, # 'ц' + 231: 21, # 'ч' + 232: 27, # 'ш' + 233: 24, # 'щ' + 234: 17, # 'ъ' + 235: 75, # 'ы' + 236: 52, # 'ь' + 237: 241, # 'э' + 238: 42, # 'ю' + 239: 16, # 'я' + 240: 62, # '№' + 241: 242, # 'ё' + 242: 243, # 'ђ' + 243: 244, # 'ѓ' + 244: 58, # 'є' + 245: 245, # 'ѕ' + 246: 98, # 'і' + 247: 246, # 'ї' + 248: 247, # 'ј' + 249: 248, # 'љ' + 250: 249, # 'њ' + 251: 250, # 'ћ' + 252: 251, # 'ќ' + 253: 91, # '§' + 254: 252, # 'ў' + 255: 253, # 'џ' +} + +ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel( + charset_name="ISO-8859-5", + language="Bulgarian", + 
char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя", +) + +WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 206, # 'Ђ' + 129: 207, # 'Ѓ' + 130: 208, # '‚' + 131: 209, # 'ѓ' + 132: 210, # '„' + 133: 211, # '…' + 134: 212, # '†' + 135: 213, # '‡' + 136: 120, # '€' + 137: 214, # '‰' + 138: 215, # 'Љ' + 139: 216, # '‹' + 140: 217, # 'Њ' + 141: 218, # 'Ќ' + 142: 219, # 'Ћ' + 143: 220, # 'Џ' + 144: 221, # 'ђ' + 145: 78, # '‘' + 146: 64, # '’' + 147: 83, # '“' + 148: 121, # '”' + 149: 98, # '•' + 150: 117, # '–' + 151: 105, # '—' + 152: 222, # None + 153: 223, # '™' + 154: 224, # 'љ' + 155: 225, # '›' + 156: 226, # 'њ' + 157: 227, # 'ќ' + 158: 228, # 'ћ' + 159: 229, # 'џ' + 160: 88, # '\xa0' + 161: 230, # 'Ў' + 162: 231, # 'ў' + 163: 232, # 'Ј' + 164: 233, # '¤' + 165: 122, # 'Ґ' + 166: 89, # '¦' + 167: 106, # '§' + 168: 234, # 'Ё' + 169: 235, # '©' + 170: 236, # 'Є' + 171: 237, # '«' + 172: 238, # '¬' + 173: 45, # '\xad' + 174: 239, # '®' + 175: 240, # 'Ї' + 176: 73, # '°' + 177: 80, # '±' + 178: 118, # 'І' + 179: 114, # 'і' + 180: 241, # 'ґ' + 181: 242, # 'µ' + 182: 243, # '¶' + 183: 244, # '·' + 184: 
245, # 'ё' + 185: 62, # '№' + 186: 58, # 'є' + 187: 246, # '»' + 188: 247, # 'ј' + 189: 248, # 'Ѕ' + 190: 249, # 'ѕ' + 191: 250, # 'ї' + 192: 31, # 'А' + 193: 32, # 'Б' + 194: 35, # 'В' + 195: 43, # 'Г' + 196: 37, # 'Д' + 197: 44, # 'Е' + 198: 55, # 'Ж' + 199: 47, # 'З' + 200: 40, # 'И' + 201: 59, # 'Й' + 202: 33, # 'К' + 203: 46, # 'Л' + 204: 38, # 'М' + 205: 36, # 'Н' + 206: 41, # 'О' + 207: 30, # 'П' + 208: 39, # 'Р' + 209: 28, # 'С' + 210: 34, # 'Т' + 211: 51, # 'У' + 212: 48, # 'Ф' + 213: 49, # 'Х' + 214: 53, # 'Ц' + 215: 50, # 'Ч' + 216: 54, # 'Ш' + 217: 57, # 'Щ' + 218: 61, # 'Ъ' + 219: 251, # 'Ы' + 220: 67, # 'Ь' + 221: 252, # 'Э' + 222: 60, # 'Ю' + 223: 56, # 'Я' + 224: 1, # 'а' + 225: 18, # 'б' + 226: 9, # 'в' + 227: 20, # 'г' + 228: 11, # 'д' + 229: 3, # 'е' + 230: 23, # 'ж' + 231: 15, # 'з' + 232: 2, # 'и' + 233: 26, # 'й' + 234: 12, # 'к' + 235: 10, # 'л' + 236: 14, # 'м' + 237: 6, # 'н' + 238: 4, # 'о' + 239: 13, # 'п' + 240: 7, # 'р' + 241: 8, # 'с' + 242: 5, # 'т' + 243: 19, # 'у' + 244: 29, # 'ф' + 245: 25, # 'х' + 246: 22, # 'ц' + 247: 21, # 'ч' + 248: 27, # 'ш' + 249: 24, # 'щ' + 250: 17, # 'ъ' + 251: 75, # 'ы' + 252: 52, # 'ь' + 253: 253, # 'э' + 254: 42, # 'ю' + 255: 16, # 'я' +} + +WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel( + charset_name="windows-1251", + language="Bulgarian", + char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py new file mode 100644 index 0000000..cfb8639 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langgreekmodel.py @@ -0,0 +1,4397 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +GREEK_LANG_MODEL = { + 60: { # 'e' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 55: { # 'o' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 
32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 58: { # 't' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 1, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 36: { # '·' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 61: { # 'Ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 1, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 1, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 46: { # 'Έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 
57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 1, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 54: { # 'Ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 31: { # 'Α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 2, # 'Β' + 43: 2, # 'Γ' + 41: 1, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 51: { # 'Β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 1, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 43: { # 'Γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 1, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 
0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 41: { # 'Δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 34: { # 'Ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 0, # 'ώ' + }, + 40: { # 'Η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 52: { # 'Θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 
'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 1, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 47: { # 'Ι' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 1, # 'Β' + 43: 1, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 1, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 44: { # 'Κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 1, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 53: { # 'Λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 1, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, 
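+# Reading this table: GREEK_LANG_MODEL[prev_order][cur_order] rates a
+# character bigram on the 0-3 scale noted at the top of this file; e.g.
+# GREEK_LANG_MODEL[60][58] == 2, i.e. 't' (order 58) is "likely" to follow
+# 'e' (order 60) in the training text. A sketch of how a prober presumably
+# consumes these models: map each input byte through a *_CHAR_TO_ORDER
+# table, look up successive order pairs here, and derive confidence from
+# the share of 3-rated ("positive") pairs relative to typical_positive_ratio.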
+ 38: { # 'Μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 2, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 2, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 49: { # 'Ν' + 60: 2, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 59: { # 'Ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 39: { # 'Ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 1, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 2, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, 
# 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 35: { # 'Π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 1, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 48: { # 'Ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 37: { # 'Σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 2, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 33: { # 'Τ' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, 
# 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 2, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 45: { # 'Υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 2, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 1, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 56: { # 'Φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 1, # 'ώ' + }, + 50: { # 'Χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 1, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 57: { # 'Ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 
'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 17: { # 'ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 18: { # 'έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 22: { # 'ή' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 15: { # 'ί' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' 
+ 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 1: { # 'α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 3, # 'ζ' + 13: 1, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 29: { # 'β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 20: { # 'γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 21: { # 'δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 
0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 3: { # 'ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 32: { # 'ζ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 13: { # 'η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 25: { # 'θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 
't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 5: { # 'ι' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 11: { # 'κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 16: { # 'λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 1, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 
0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 10: { # 'μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 3, # 'φ' + 23: 0, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 6: { # 'ν' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 30: { # 'ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 1, # 'ώ' + }, + 4: { # 'ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 
3, # 'ν' + 30: 2, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 9: { # 'π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 8: { # 'ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 14: { # 'ς' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 7: { # 'σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 
'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 2: { # 'τ' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 12: { # 'υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 28: { # 'φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 1, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 23: { # 'χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' 
+ 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 42: { # 'ψ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 24: { # 'ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 1, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 19: { # 'ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 1, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 26: { # 'ύ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 
53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 27: { # 'ώ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 1, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 1, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '€' + 129: 255, # None + 130: 255, # '‚' + 131: 255, # 'ƒ' + 132: 255, # '„' + 133: 255, # '…' + 134: 255, # '†' + 135: 255, # '‡' + 136: 255, # None + 137: 255, # '‰' + 138: 255, # None + 139: 255, # '‹' + 140: 255, # None + 141: 255, # None + 142: 255, # None + 143: 255, # None + 144: 255, # None + 145: 255, # '‘' + 146: 255, # '’' + 147: 255, # '“' + 148: 255, # '”' + 149: 255, # '•' + 150: 255, # '–' + 151: 255, # '—' + 152: 255, # None + 153: 255, # '™' + 154: 255, # None + 155: 255, # '›' + 156: 255, # None + 157: 255, # None + 158: 255, # None + 159: 255, # None + 160: 253, # '\xa0' + 161: 233, # '΅' + 162: 61, # 'Ά' + 163: 253, # '£' + 164: 253, # '¤' + 165: 253, # '¥' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # None + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # '®' + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 253, # 'µ' + 182: 253, # '¶' + 183: 36, # '·' + 184: 46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel( + charset_name="windows-1253", + language="Greek", + char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + 
typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ", +) + +ISO_8859_7_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '\x80' + 129: 255, # '\x81' + 130: 255, # '\x82' + 131: 255, # '\x83' + 132: 255, # '\x84' + 133: 255, # '\x85' + 134: 255, # '\x86' + 135: 255, # '\x87' + 136: 255, # '\x88' + 137: 255, # '\x89' + 138: 255, # '\x8a' + 139: 255, # '\x8b' + 140: 255, # '\x8c' + 141: 255, # '\x8d' + 142: 255, # '\x8e' + 143: 255, # '\x8f' + 144: 255, # '\x90' + 145: 255, # '\x91' + 146: 255, # '\x92' + 147: 255, # '\x93' + 148: 255, # '\x94' + 149: 255, # '\x95' + 150: 255, # '\x96' + 151: 255, # '\x97' + 152: 255, # '\x98' + 153: 255, # '\x99' + 154: 255, # '\x9a' + 155: 255, # '\x9b' + 156: 255, # '\x9c' + 157: 255, # '\x9d' + 158: 255, # '\x9e' + 159: 255, # '\x9f' + 160: 253, # '\xa0' + 161: 233, # '‘' + 162: 90, # '’' + 163: 253, # '£' + 164: 253, # '€' + 165: 253, # '₯' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # 'ͺ' + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # None + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 248, # '΅' + 182: 61, # 'Ά' + 183: 36, # '·' + 184: 46, 
# 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel( + charset_name="ISO-8859-7", + language="Greek", + char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py new file mode 100644 index 0000000..56d2975 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhebrewmodel.py @@ -0,0 +1,4380 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HEBREW_LANG_MODEL = { + 50: { # 'a' + 50: 0, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 0, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 60: { # 'c' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' 
+ 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 61: { # 'd' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 42: { # 'e' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 2, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 53: { # 'i' + 50: 1, # 'a' + 60: 2, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 0, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 56: { # 'l' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 2, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 
59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 54: { # 'n' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 49: { # 'o' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 51: { # 'r' + 50: 2, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 43: { # 
's' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 2, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 44: { # 't' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 63: { # 'u' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 34: { # '\xa0' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 2, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 
23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 55: { # '´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 48: { # '¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 39: { # '½' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 57: { # '¾' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 
62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 30: { # 'ְ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 0, # 'ף' + 18: 2, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 59: { # 'ֱ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 41: { # 'ֲ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 33: { # 'ִ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 
'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 37: { # 'ֵ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 1, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 36: { # 'ֶ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 2, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 31: { # 'ַ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 
2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 29: { # 'ָ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 35: { # 'ֹ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 62: { # 'ֻ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 28: { # 'ּ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 3, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 3, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 2, # 'ׁ' + 45: 1, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 1, 
# 'ה' + 2: 2, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 1, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 38: { # 'ׁ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 45: { # 'ׂ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 9: { # 'א' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 2, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 8: { # 'ב' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' 
+ 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 1, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 20: { # 'ג' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 16: { # 'ד' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 3: { # 'ה' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' 
+ 40: 2, # '…' + }, + 2: { # 'ו' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 3, # 'ֹ' + 62: 0, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 24: { # 'ז' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 1, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 14: { # 'ח' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 1, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 22: { # 'ט' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 1, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 
2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 2, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 1: { # 'י' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 25: { # 'ך' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 15: { # 'כ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 4: { # 'ל' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, 
# 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 11: { # 'ם' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 6: { # 'מ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 23: { # 'ן' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 12: { # 'נ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 
0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 19: { # 'ס' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 2, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 13: { # 'ע' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 1, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 26: { # 'ף' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' 
+ 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 18: { # 'פ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 27: { # 'ץ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 21: { # 'צ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 1, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 0, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 17: { # 'ק' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 
16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 7: { # 'ר' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 10: { # 'ש' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 3, # 'ׁ' + 45: 2, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 5: { # 'ת' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 1, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 32: { # '–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, 
# '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 52: { # '’' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 47: { # '“' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 46: { # '”' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, 
# '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 58: { # '†' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 2, # '†' + 40: 0, # '…' + }, + 40: { # '…' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1255_HEBREW_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 69, # 'A' + 66: 91, # 'B' + 67: 79, # 'C' + 68: 80, # 'D' + 69: 92, # 'E' + 70: 89, # 'F' + 71: 97, # 'G' + 72: 90, # 'H' + 73: 68, # 'I' + 74: 111, # 'J' + 75: 112, # 'K' + 76: 82, # 'L' + 77: 73, # 'M' + 78: 95, # 'N' + 79: 85, # 'O' + 80: 78, # 'P' + 81: 121, # 'Q' + 82: 86, # 'R' + 83: 71, # 'S' + 84: 67, # 'T' + 85: 102, # 'U' + 86: 107, # 'V' + 87: 84, # 'W' + 88: 114, # 'X' + 89: 103, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 50, # 'a' + 98: 74, # 'b' + 99: 60, # 'c' + 100: 61, # 'd' + 101: 42, # 'e' + 102: 76, # 'f' + 103: 70, # 'g' + 104: 64, # 'h' + 105: 53, # 'i' + 106: 105, # 'j' + 107: 93, # 'k' + 108: 56, # 'l' + 109: 65, # 'm' + 110: 54, # 'n' + 111: 49, # 'o' + 112: 66, # 'p' + 113: 110, # 'q' + 114: 51, # 'r' + 115: 43, # 's' + 116: 44, # 't' + 117: 63, # 'u' + 118: 81, # 'v' + 119: 77, # 'w' + 120: 98, # 'x' + 121: 75, # 'y' + 122: 108, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 124, # '€' + 129: 202, # None + 130: 203, # '‚' + 131: 204, # 'ƒ' + 132: 205, # '„' + 133: 40, # '…' + 134: 58, # '†' + 135: 206, # '‡' + 136: 207, # 'ˆ' + 137: 208, # '‰' + 138: 209, # None + 139: 210, # '‹' + 140: 211, # None + 141: 212, # None + 142: 213, # None + 143: 214, # None + 144: 215, # None + 145: 83, # '‘' + 146: 52, # '’' + 147: 47, # '“' + 148: 46, # '”' + 149: 72, # '•' + 150: 32, # '–' + 151: 94, # '—' + 152: 216, # '˜' + 153: 113, # '™' + 154: 217, # None + 155: 109, # '›' + 156: 218, # None + 157: 219, # None + 158: 220, # None + 159: 221, # None + 160: 34, # '\xa0' + 161: 116, # '¡' + 162: 222, # '¢' + 163: 118, # '£' + 164: 100, # '₪' + 165: 223, # '¥' + 166: 224, # '¦' + 167: 117, # '§' + 168: 119, # '¨' + 169: 104, # '©' + 170: 125, # '×' + 171: 225, # '«' + 172: 226, # '¬' + 173: 87, # '\xad' + 174: 99, # '®' + 175: 227, # '¯' + 176: 106, # '°' + 177: 122, # '±' + 178: 123, # '²' + 179: 228, # '³' + 180: 55, # '´' + 181: 229, # 'µ' + 182: 230, # '¶' + 183: 101, # '·' + 184: 231, # '¸' + 185: 232, # '¹' + 186: 120, # '÷' + 187: 233, # '»' + 188: 48, # '¼' + 189: 39, # '½' + 190: 57, # '¾' + 191: 234, # '¿' + 192: 30, # 'ְ' + 193: 59, # 'ֱ' + 194: 41, # 'ֲ' + 195: 88, # 'ֳ' + 196: 33, # 'ִ' + 197: 37, # 'ֵ' + 198: 36, # 'ֶ' + 199: 31, # 'ַ' + 200: 29, # 'ָ' + 201: 35, # 'ֹ' + 202: 235, # None + 203: 62, # 'ֻ' + 204: 28, # 'ּ' + 205: 236, # 'ֽ' + 206: 126, # '־' + 207: 237, # 'ֿ' + 208: 238, # '׀' + 209: 38, # 'ׁ' + 210: 45, # 'ׂ' + 211: 239, # '׃' + 212: 240, # 'װ' + 213: 241, # 'ױ' + 214: 242, # 'ײ' + 215: 243, # '׳' + 216: 127, # '״' + 217: 244, # None + 218: 245, # None + 219: 246, # None + 220: 247, # None + 221: 248, # None + 222: 249, # None + 223: 250, # None + 224: 9, # 'א' + 225: 8, # 'ב' + 226: 20, # 'ג' + 227: 16, # 'ד' + 228: 3, # 'ה' + 229: 2, # 'ו' + 230: 24, # 'ז' + 231: 14, # 'ח' + 232: 22, # 'ט' + 233: 1, # 'י' + 234: 25, # 'ך' + 235: 15, # 'כ' + 236: 4, # 'ל' + 237: 11, # 'ם' + 238: 6, # 'מ' + 239: 23, # 'ן' + 240: 12, # 'נ' + 241: 19, # 'ס' + 242: 13, # 'ע' + 243: 26, # 'ף' + 244: 18, # 'פ' + 245: 27, # 'ץ' + 246: 21, # 'צ' + 247: 17, # 'ק' + 248: 7, # 'ר' + 249: 10, # 'ש' + 250: 5, # 'ת' + 251: 251, # None + 252: 252, # None + 253: 128, # '\u200e' + 254: 96, # '\u200f' + 255: 253, # None +} + +WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel( + charset_name="windows-1255", + language="Hebrew", + char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER, + 
language_model=HEBREW_LANG_MODEL, + typical_positive_ratio=0.984004, + keep_ascii_letters=False, + alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py new file mode 100644 index 0000000..09a0d32 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langhungarianmodel.py @@ -0,0 +1,4649 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HUNGARIAN_LANG_MODEL = { + 28: { # 'A' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 2, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 40: { # 'B' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 3, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 54: { # 'C' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 3, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 45: { # 'D' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 
'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 32: { # 'E' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 50: { # 'F' + 28: 1, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 0, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 49: { # 'G' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 
61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 38: { # 'H' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 39: { # 'I' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 53: { # 'J' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 36: { # 'K' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' 
+ 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 2, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 41: { # 'L' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 34: { # 'M' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 35: { # 'N' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 47: { # 'O' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 
35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 46: { # 'P' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 43: { # 'R' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 2, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 2, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 33: { # 'S' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 3, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 
'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 37: { # 'T' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 57: { # 'U' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 48: { # 'V' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 55: { # 'Y' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' 
+ 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 52: { # 'Z' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 2: { # 'a' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 18: { # 'b' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 26: { # 'c' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 
'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 17: { # 'd' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 1: { # 'e' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 2, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 27: { # 'f' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 3, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 12: { # 'g' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' 
+ 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 20: { # 'h' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 9: { # 'i' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 1, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 22: { # 'j' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, 
# 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 7: { # 'k' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 3, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 6: { # 'l' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 3, # 'ő' + 56: 1, # 'ű' + }, + 13: { # 'm' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 4: { # 'n' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 2, # 
'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 1, # 'x' + 16: 3, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 8: { # 'o' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 23: { # 'p' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 10: { # 'r' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 2, # 'ű' + }, + 5: { # 's' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 
'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 3: { # 't' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 3, # 'ő' + 56: 2, # 'ű' + }, + 21: { # 'u' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 19: { # 'v' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, 
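(Editorial aside for readers of this vendored table: HUNGARIAN_LANG_MODEL is a nested mapping from the frequency "order" of one character to the order of the character that follows, with a 0-3 likelihood rating — the same 3=Positive, 2=Likely, 1=Unlikely, 0=Negative scale spelled out at the top of langrussianmodel.py further down in this diff. The inline comments name the character each order stands for. Below is a minimal runnable sketch of the lookup, using only values visible in this hunk (order 5 = 's', 11 = 'z', 16 = 'y'); the toy dict and helper are illustrative, not part of chardet.)

# Toy excerpt in the same shape as HUNGARIAN_LANG_MODEL:
# order-of-previous-char -> order-of-next-char -> rating 0..3
TOY_LANG_MODEL = {
    5: {11: 3, 16: 1},  # after 's' (order 5): 'z' (11) is Positive, 'y' (16) Unlikely
}

def rate_bigram(model, prev_order, cur_order):
    """Return the 0-3 rating for cur_order following prev_order (0 if unseen)."""
    return model.get(prev_order, {}).get(cur_order, 0)

assert rate_bigram(TOY_LANG_MODEL, 5, 11) == 3  # "sz", a very common Hungarian digraph
assert rate_bigram(TOY_LANG_MODEL, 5, 16) == 1  # "sy" is rare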
+ 62: { # 'x' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 16: { # 'y' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 11: { # 'z' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 51: { # 'Á' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, 
# 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 44: { # 'É' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 61: { # 'Í' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 58: { # 'Ó' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 2, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 59: { # 'Ö' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 
0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 60: { # 'Ú' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 2, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 63: { # 'Ü' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 14: { # 'á' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 15: { # 'é' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, 
# 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 30: { # 'í' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 25: { # 'ó' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 24: { # 'ö' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 
30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 31: { # 'ú' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 3, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 29: { # 'ü' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 42: { # 'ő' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 56: { # 'ű' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 
23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 72, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 161, # '€' + 129: 162, # None + 130: 163, # '‚' + 131: 164, # None + 132: 165, # '„' + 133: 166, # '…' + 134: 167, # '†' + 135: 168, # '‡' + 136: 169, # None + 137: 170, # '‰' + 138: 171, # 'Š' + 139: 172, # '‹' + 140: 173, # 'Ś' + 141: 174, # 'Ť' + 142: 175, # 'Ž' + 143: 176, # 'Ź' + 144: 177, # None + 145: 178, # '‘' + 146: 179, # '’' + 147: 180, # '“' + 148: 78, # '”' + 149: 181, # '•' + 150: 69, # '–' + 151: 182, # '—' + 152: 183, # None + 153: 184, # '™' + 154: 185, # 'š' + 155: 186, # '›' + 156: 187, # 'ś' + 157: 188, # 'ť' + 158: 189, # 'ž' + 159: 190, # 'ź' + 160: 191, # '\xa0' + 161: 192, # 'ˇ' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ą' + 166: 197, # '¦' + 167: 76, # '§' + 168: 198, # '¨' + 169: 199, # '©' + 170: 200, # 'Ş' + 171: 201, # '«' + 172: 202, # '¬' + 173: 203, # '\xad' + 174: 204, # '®' + 175: 205, # 'Ż' + 176: 81, # '°' + 177: 206, # '±' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'µ' + 182: 211, # '¶' + 183: 212, # '·' + 184: 213, # '¸' + 185: 214, # 'ą' + 186: 215, # 'ş' + 187: 216, # '»' + 188: 217, # 'Ľ' + 189: 218, # '˝' + 190: 219, # 'ľ' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 83, # 'Â' + 195: 222, # 'Ă' + 196: 80, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 70, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 84, # 'ŕ' + 225: 14, # 'á' + 226: 75, # 'â' + 227: 242, # 'ă' + 228: 71, # 'ä' + 229: 82, # 'ĺ' + 230: 243, # 'ć' + 231: 73, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 85, # 'ę' + 235: 79, # 'ë' + 236: 86, # 'ě' + 237: 30, # 'í' + 238: 77, # 'î' + 239: 87, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 74, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel( + charset_name="windows-1250", + language="Hungarian", + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + 
typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű", +) + +ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 71, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 159, # '\x80' + 129: 160, # '\x81' + 130: 161, # '\x82' + 131: 162, # '\x83' + 132: 163, # '\x84' + 133: 164, # '\x85' + 134: 165, # '\x86' + 135: 166, # '\x87' + 136: 167, # '\x88' + 137: 168, # '\x89' + 138: 169, # '\x8a' + 139: 170, # '\x8b' + 140: 171, # '\x8c' + 141: 172, # '\x8d' + 142: 173, # '\x8e' + 143: 174, # '\x8f' + 144: 175, # '\x90' + 145: 176, # '\x91' + 146: 177, # '\x92' + 147: 178, # '\x93' + 148: 179, # '\x94' + 149: 180, # '\x95' + 150: 181, # '\x96' + 151: 182, # '\x97' + 152: 183, # '\x98' + 153: 184, # '\x99' + 154: 185, # '\x9a' + 155: 186, # '\x9b' + 156: 187, # '\x9c' + 157: 188, # '\x9d' + 158: 189, # '\x9e' + 159: 190, # '\x9f' + 160: 191, # '\xa0' + 161: 192, # 'Ą' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ľ' + 166: 197, # 'Ś' + 167: 75, # '§' + 168: 198, # '¨' + 169: 199, # 'Š' + 170: 200, # 'Ş' + 171: 201, # 'Ť' + 172: 202, # 'Ź' + 173: 203, # '\xad' + 174: 204, # 'Ž' + 175: 205, # 'Ż' + 176: 79, # '°' + 177: 206, # 'ą' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'ľ' + 182: 211, # 'ś' + 183: 212, # 'ˇ' + 184: 213, # '¸' + 185: 214, # 'š' + 
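(Editorial aside: here is how the pieces of a SingleByteCharSetModel fit together, as a hedged sketch — the function below is an approximation of the idea written for this note, not chardet's actual prober. char_to_order_map turns each raw byte into a frequency order; the reserved codes documented above (255 undefined, 254 carriage return/newline, 253 symbols, 252 digits, 251 controls), along with any order beyond the ~64 modelled letters, break the bigram chain; language_model rates the surviving consecutive pairs; and the share of Positive (3) ratings is compared against typical_positive_ratio.)

def score_bytes(raw, char_to_order, lang_model, typical_positive_ratio):
    """Approximate single-byte-charset scoring: fraction of Positive
    bigrams relative to the language's typical ratio."""
    ratings = []
    prev = None
    for byte in raw:
        order = char_to_order.get(byte, 255)
        if order > 63:       # symbol/digit/control codes, and letters too rare
            prev = None      # to appear in the bigram table: reset the context
            continue
        if prev is not None:
            ratings.append(lang_model.get(prev, {}).get(order, 0))
        prev = order
    if not ratings:
        return 0.0
    positive = sum(1 for r in ratings if r == 3)
    return min(1.0, positive / len(ratings) / typical_positive_ratio)

(Usage against the tables in this file would look like score_bytes("ősz".encode("windows-1250"), WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, HUNGARIAN_LANG_MODEL, 0.947368); chardet itself wires the same ingredients through its SingleByteCharSetProber rather than a free function like this.)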
186: 215, # 'ş' + 187: 216, # 'ť' + 188: 217, # 'ź' + 189: 218, # '˝' + 190: 219, # 'ž' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 81, # 'Â' + 195: 222, # 'Ă' + 196: 78, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 69, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 82, # 'ŕ' + 225: 14, # 'á' + 226: 74, # 'â' + 227: 242, # 'ă' + 228: 70, # 'ä' + 229: 80, # 'ĺ' + 230: 243, # 'ć' + 231: 72, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 83, # 'ę' + 235: 77, # 'ë' + 236: 84, # 'ě' + 237: 30, # 'í' + 238: 76, # 'î' + 239: 85, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 73, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel( + charset_name="ISO-8859-2", + language="Hungarian", + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py new file mode 100644 index 0000000..39a5388 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langrussianmodel.py @@ -0,0 +1,5725 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +RUSSIAN_LANG_MODEL = { + 37: { # 'А' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 44: { # 'Б' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 
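(Editorial aside: the 0-3 legend at the top of this Russian model reads as bigram likelihood, the same scheme used by the Hungarian tables above. A tiny runnable illustration using two entries visible in the 'А' row (order 37) of RUSSIAN_LANG_MODEL — the excerpt dict is a hand-copied toy for this note, not an import.)

RATING_LABELS = {3: "Positive", 2: "Likely", 1: "Unlikely", 0: "Negative"}

# Hand-copied excerpt of RUSSIAN_LANG_MODEL[37] (the row for 'А'):
ROW_FOR_A = {8: 3, 2: 0}  # 'л' (order 8) after 'А' is Positive; 'е' (order 2) is Negative

for order, char in ((8, "л"), (2, "е")):
    print(f"'А' + '{char}':", RATING_LABELS[ROW_FOR_A[order]])
# -> 'А' + 'л': Positive   (e.g. "Ал..." as in common names)
# -> 'А' + 'е': Negative   ('е' directly after 'А' is rare in Russian)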
13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 33: { # 'В' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 46: { # 'Г' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 41: { # 'Д' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 3, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 48: { # 'Е' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 
2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 56: { # 'Ж' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 2, # 'ю' + 16: 0, # 'я' + }, + 51: { # 'З' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 42: { # 'И' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 60: { # 
'Й' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 36: { # 'К' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 49: { # 'Л' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 1, # 'я' + }, + 38: { # 'М' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 
[RUSSIAN_LANG_MODEL, continued: the tail of the sub-table in progress (keys 15 'п' through 16 'я'), then the remaining 64-entry bigram-precedence sub-tables (values 0-3; higher means a more likely character pair) for orders 31 'Н', 34 'О', 35 'П', 45 'Р', 32 'С', 40 'Т', 52 'У', 53 'Ф', 55 'Х', 58 'Ц', 50 'Ч', 57 'Ш', 63 'Щ', 62 'Ы', 61 'Ь', 47 'Э', 59 'Ю', 43 'Я', 3 'а', 21 'б', 10 'в', 19 'г', 13 'д', 2 'е', 24 'ж', 20 'з', 4 'и', 23 'й', 11 'к', 8 'л', 12 'м', 5 'н', 1 'о', 15 'п', 9 'р', 7 'с', 6 'т', 14 'у', 39 'ф', 26 'х', 28 'ц', 22 'ч', 25 'ш', 29 'щ', 54 'ъ', 18 'ы', 17 'ь', 30 'э', 27 'ю', 16 'я'.]
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+IBM866_RUSSIAN_CHAR_TO_ORDER = {
[256-entry byte-to-order map for IBM866: control bytes → 255 ('\n' and '\r' → 254), punctuation → 253, digits → 252, ASCII letters → orders 64-185, Cyrillic letters at 0x80-0xAF and 0xE0-0xEF → the frequency orders used by RUSSIAN_LANG_MODEL ('Ё' → 239, 'ё' → 68), box-drawing, symbol, and non-Russian Cyrillic bytes → orders 191-252, 0xFF '\xa0' → 255.]
+}
+
+IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="IBM866",
+    language="Russian",
+    char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = {
[256-entry byte-to-order map for windows-1251: same handling of control bytes, punctuation, digits, and ASCII letters as above; Cyrillic letters at 0xC0-0xFF → the shared frequency orders; 'ё' (0xB8) → 68; remaining punctuation and non-Russian Cyrillic bytes → orders 191-253.]
+}
+
+WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="windows-1251",
+    language="Russian",
+    char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+IBM855_RUSSIAN_CHAR_TO_ORDER = {
[256-entry byte-to-order map for IBM855: same ASCII handling as above; Russian Cyrillic letters, interleaved lowercase/uppercase across 0x84-0xFC, → the shared frequency orders; non-Russian Cyrillic, box-drawing, and symbol bytes → orders 191-252.]
+}
+
+IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(
+    charset_name="IBM855",
+    language="Russian",
+    char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER,
+    language_model=RUSSIAN_LANG_MODEL,
+    typical_positive_ratio=0.976601,
+    keep_ascii_letters=False,
+    alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё",
+)
+
+KOI8_R_RUSSIAN_CHAR_TO_ORDER = {
[Byte-to-order map for KOI8-R, entries 0-188 in this section: same ASCII handling as above; box-drawing, math, and symbol bytes at 0x80-0xBF → orders 191-250, except 'ё' (0xA3) → 68 and 'Ё' (0xB3) → 241.]
+ 189: 251, # '╫' + 190: 252, # '╬' + 191: 253, # '©' + 192: 27, # 'ю' + 193: 3, # 'а' + 194: 21, # 'б' + 195: 28, # 'ц' + 196: 13, # 'д' + 197: 2, # 'е' + 198: 39, # 'ф' + 199: 19, # 'г' + 200: 26, # 'х' + 201: 4, # 'и' + 202: 23, # 'й' + 203: 11, # 'к' + 204: 8, # 'л' + 205: 12, # 'м' + 206: 5, # 'н' + 207: 1, # 'о' + 208: 15, # 'п' + 209: 16, # 'я' + 210: 9, # 'р' + 211: 7, # 'с' + 212: 6, # 'т' + 213: 14, # 'у' + 214: 24, # 'ж' + 215: 10, # 'в' + 216: 17, # 'ь' + 217: 18, # 'ы' + 218: 20, # 'з' + 219: 25, # 'ш' + 220: 30, # 'э' + 221: 29, # 'щ' + 222: 22, # 'ч' + 223: 54, # 'ъ' + 224: 59, # 'Ю' + 225: 37, # 'А' + 226: 44, # 'Б' + 227: 58, # 'Ц' + 228: 41, # 'Д' + 229: 48, # 'Е' + 230: 53, # 'Ф' + 231: 46, # 'Г' + 232: 55, # 'Х' + 233: 42, # 'И' + 234: 60, # 'Й' + 235: 36, # 'К' + 236: 49, # 'Л' + 237: 38, # 'М' + 238: 31, # 'Н' + 239: 34, # 'О' + 240: 35, # 'П' + 241: 43, # 'Я' + 242: 45, # 'Р' + 243: 32, # 'С' + 244: 40, # 'Т' + 245: 52, # 'У' + 246: 56, # 'Ж' + 247: 33, # 'В' + 248: 61, # 'Ь' + 249: 62, # 'Ы' + 250: 51, # 'З' + 251: 57, # 'Ш' + 252: 47, # 'Э' + 253: 63, # 'Щ' + 254: 50, # 'Ч' + 255: 70, # 'Ъ' +} + +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel( + charset_name="KOI8-R", + language="Russian", + char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё", +) + +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'А' + 129: 44, # 'Б' + 130: 33, # 'В' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Н' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Х' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 191, # '†' + 161: 192, # '°' + 162: 193, # 'Ґ' + 163: 194, # '£' + 164: 195, # '§' + 165: 196, # '•' + 166: 197, # '¶' + 167: 198, # 'І' + 168: 199, # '®' + 169: 200, # '©' + 170: 201, # '™' + 171: 202, # 'Ђ' + 172: 203, # 'ђ' + 173: 204, # '≠' + 174: 205, # 'Ѓ' + 175: 206, # 'ѓ' + 176: 207, # '∞' + 177: 208, # '±' + 178: 209, # '≤' + 179: 210, # '≥' + 180: 211, # 'і' + 181: 212, # 'µ' + 182: 213, # 'ґ' + 183: 214, # 'Ј' + 184: 215, # 'Є' + 185: 216, # 'є' + 186: 217, # 'Ї' + 187: 218, # 'ї' + 188: 219, # 'Љ' + 189: 220, # 'љ' + 190: 221, # 'Њ' + 191: 222, # 'њ' + 192: 223, # 'ј' + 193: 224, # 'Ѕ' + 194: 225, # '¬' + 195: 226, # '√' + 196: 227, # 'ƒ' + 197: 228, # '≈' + 198: 229, # '∆' + 199: 230, # '«' + 200: 231, # '»' + 201: 232, # '…' + 202: 233, # '\xa0' + 203: 234, # 'Ћ' + 204: 235, # 'ћ' + 205: 236, # 'Ќ' + 206: 237, # 'ќ' + 207: 238, # 'ѕ' + 208: 239, # '–' + 209: 240, # '—' + 210: 241, # '“' + 211: 242, # '”' + 212: 243, # '‘' + 213: 244, # '’' + 214: 245, # '÷' + 215: 246, # '„' + 216: 247, # 'Ў' + 217: 248, # 'ў' + 218: 249, # 'Џ' + 219: 250, # 'џ' + 220: 251, # '№' + 221: 252, # 'Ё' + 222: 68, # 'ё' + 223: 16, # 'я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'р' + 241: 7, # 'с' + 242: 6, # 'т' + 243: 14, # 'у' + 244: 39, # 'ф' + 245: 26, # 'х' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ъ' + 251: 18, # 'ы' + 252: 17, # 'ь' + 253: 30, # 'э' + 254: 27, # 'ю' + 255: 255, # '€' +} + +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel( + charset_name="MacCyrillic", + language="Russian", + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
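+    # As with the other Russian models in this file (windows-1251, IBM855,
+    # KOI8-R, ISO-8859-5), this model shares RUSSIAN_LANG_MODEL, the same
+    # alphabet, and the same typical_positive_ratio; only the byte-to-order
+    # map differs, which is what lets detection distinguish the encodings.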
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё", +) + +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '\x80' + 129: 192, # '\x81' + 130: 193, # '\x82' + 131: 194, # '\x83' + 132: 195, # '\x84' + 133: 196, # '\x85' + 134: 197, # '\x86' + 135: 198, # '\x87' + 136: 199, # '\x88' + 137: 200, # '\x89' + 138: 201, # '\x8a' + 139: 202, # '\x8b' + 140: 203, # '\x8c' + 141: 204, # '\x8d' + 142: 205, # '\x8e' + 143: 206, # '\x8f' + 144: 207, # '\x90' + 145: 208, # '\x91' + 146: 209, # '\x92' + 147: 210, # '\x93' + 148: 211, # '\x94' + 149: 212, # '\x95' + 150: 213, # '\x96' + 151: 214, # '\x97' + 152: 215, # '\x98' + 153: 216, # '\x99' + 154: 217, # '\x9a' + 155: 218, # '\x9b' + 156: 219, # '\x9c' + 157: 220, # '\x9d' + 158: 221, # '\x9e' + 159: 222, # '\x9f' + 160: 223, # '\xa0' + 161: 224, # 'Ё' + 162: 225, # 'Ђ' + 163: 226, # 'Ѓ' + 164: 227, # 'Є' + 165: 228, # 'Ѕ' + 166: 229, # 'І' + 167: 230, # 'Ї' + 168: 231, # 'Ј' + 169: 232, # 'Љ' + 170: 233, # 'Њ' + 171: 234, # 'Ћ' + 172: 235, # 'Ќ' + 173: 236, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 37, # 'А' + 177: 44, # 'Б' + 178: 33, # 'В' + 179: 46, # 'Г' + 180: 41, # 'Д' + 181: 48, # 'Е' + 182: 56, # 'Ж' + 183: 51, 
# 'З' + 184: 42, # 'И' + 185: 60, # 'Й' + 186: 36, # 'К' + 187: 49, # 'Л' + 188: 38, # 'М' + 189: 31, # 'Н' + 190: 34, # 'О' + 191: 35, # 'П' + 192: 45, # 'Р' + 193: 32, # 'С' + 194: 40, # 'Т' + 195: 52, # 'У' + 196: 53, # 'Ф' + 197: 55, # 'Х' + 198: 58, # 'Ц' + 199: 50, # 'Ч' + 200: 57, # 'Ш' + 201: 63, # 'Щ' + 202: 70, # 'Ъ' + 203: 62, # 'Ы' + 204: 61, # 'Ь' + 205: 47, # 'Э' + 206: 59, # 'Ю' + 207: 43, # 'Я' + 208: 3, # 'а' + 209: 21, # 'б' + 210: 10, # 'в' + 211: 19, # 'г' + 212: 13, # 'д' + 213: 2, # 'е' + 214: 24, # 'ж' + 215: 20, # 'з' + 216: 4, # 'и' + 217: 23, # 'й' + 218: 11, # 'к' + 219: 8, # 'л' + 220: 12, # 'м' + 221: 5, # 'н' + 222: 1, # 'о' + 223: 15, # 'п' + 224: 9, # 'р' + 225: 7, # 'с' + 226: 6, # 'т' + 227: 14, # 'у' + 228: 39, # 'ф' + 229: 26, # 'х' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ъ' + 235: 18, # 'ы' + 236: 17, # 'ь' + 237: 30, # 'э' + 238: 27, # 'ю' + 239: 16, # 'я' + 240: 239, # '№' + 241: 68, # 'ё' + 242: 240, # 'ђ' + 243: 241, # 'ѓ' + 244: 242, # 'є' + 245: 243, # 'ѕ' + 246: 244, # 'і' + 247: 245, # 'ї' + 248: 246, # 'ј' + 249: 247, # 'љ' + 250: 248, # 'њ' + 251: 249, # 'ћ' + 252: 250, # 'ќ' + 253: 251, # '§' + 254: 252, # 'ў' + 255: 255, # 'џ' +} + +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel( + charset_name="ISO-8859-5", + language="Russian", + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 0000000..489cad9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langthaimodel.py @@ -0,0 +1,4380 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +THAI_LANG_MODEL = { + 5: { # 'ก' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 3, # 'ฎ' + 57: 2, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 1, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 30: { # 'ข' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 2, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' 
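+        # Structure of THAI_LANG_MODEL: each outer key is the frequency
+        # order of a lead character, and the nested map gives the likelihood
+        # (3 Positive, 2 Likely, 1 Unlikely, 0 Negative, per the legend at
+        # the top of this file) of each possible trailing character. The
+        # single-byte prober tallies these scores over consecutive pairs.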
+ 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 24: { # 'ค' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 3, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 8: { # 'ง' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 1, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 2, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 3, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 26: { # 'จ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 52: { # 'ฉ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 
14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 34: { # 'ช' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 1, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 51: { # 'ซ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 47: { # 'ญ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 58: { # 'ฎ' + 5: 
2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 57: { # 'ฏ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 49: { # 'ฐ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 53: { # 'ฑ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 
'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 55: { # 'ฒ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 43: { # 'ณ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 3, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 3, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 20: { # 'ด' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 2, # '็' + 6: 1, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 19: { # 'ต' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 2, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 1, # 
'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 44: { # 'ถ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 14: { # 'ท' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 3, # 'ศ' + 46: 1, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 48: { # 'ธ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 2, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 3: { # 'น' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' 
+ 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 1, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 3, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 3, # 'โ' + 29: 3, # 'ใ' + 33: 3, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 17: { # 'บ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 25: { # 'ป' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 1, # 'ฎ' + 57: 3, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 1, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 2, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 39: { # 'ผ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 0, # 'ุ' + 35: 3, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 
3, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 62: { # 'ฝ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 2, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 31: { # 'พ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 3, # 'ื' + 32: 1, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 0, # '่' + 7: 1, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 54: { # 'ฟ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 45: { # 'ภ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, 
# 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 9: { # 'ม' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 2, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 16: { # 'ย' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 2: { # 'ร' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 3, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 3, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 3, # 'เ' + 28: 3, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 61: { # 'ฤ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 
'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 2, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 15: { # 'ล' + 5: 2, # 'ก' + 30: 3, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 2, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 12: { # 'ว' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 42: { # 'ศ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 3, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 46: { # 'ษ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' 
+ 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 2, # 'ฎ' + 57: 1, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 18: { # 'ส' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 3, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 21: { # 'ห' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 4: { # 'อ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 0, # 'ู' + 11: 
3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 63: { # 'ฯ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 22: { # 'ะ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 10: { # 'ั' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 3, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 1: { # 'า' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 1, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 2, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, 
# 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 36: { # 'ำ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 23: { # 'ิ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 3, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 13: { # 'ี' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 40: { # 'ึ' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 
'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 27: { # 'ื' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 32: { # 'ุ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 1, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 35: { # 'ู' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 
59: 0, # '๒' + 60: 0, # '๕' + }, + 11: { # 'เ' + 5: 3, # 'ก' + 30: 3, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 3, # 'ฉ' + 34: 3, # 'ช' + 51: 2, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 3, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 28: { # 'แ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 3, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 41: { # 'โ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 29: { # 'ใ' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 
36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 33: { # 'ไ' + 5: 1, # 'ก' + 30: 2, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 50: { # 'ๆ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 37: { # '็' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 6: { # '่' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 
31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 7: { # '้' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 38: { # '์' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 56: { # '๑' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 1, # '๕' + }, + 59: { # '๒' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 
0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 1, # '๑' + 59: 1, # '๒' + 60: 3, # '๕' + }, + 60: { # '๕' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 0, # '๕' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +TIS_620_THAI_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 182, # 'A' + 66: 106, # 'B' + 67: 107, # 'C' + 68: 100, # 'D' + 69: 183, # 'E' + 70: 184, # 'F' + 71: 185, # 'G' + 72: 101, # 'H' + 73: 94, # 'I' + 74: 186, # 'J' + 75: 187, # 'K' + 76: 108, # 'L' + 77: 109, # 'M' + 78: 110, # 'N' + 79: 111, # 'O' + 80: 188, # 'P' + 81: 189, # 'Q' + 82: 190, # 'R' + 83: 89, # 'S' + 84: 95, # 'T' + 85: 112, # 'U' + 86: 113, # 'V' + 87: 191, # 'W' + 88: 192, # 'X' + 89: 193, # 'Y' + 90: 194, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 64, # 'a' + 98: 72, # 'b' + 99: 73, # 'c' + 100: 114, # 'd' + 101: 74, # 'e' + 102: 115, # 'f' + 103: 116, # 'g' + 104: 102, # 'h' + 105: 81, # 'i' + 106: 201, # 'j' + 107: 117, # 'k' + 108: 90, # 'l' + 109: 103, # 'm' + 110: 78, # 'n' + 111: 82, # 'o' + 112: 96, # 'p' + 113: 202, # 'q' + 114: 91, # 'r' + 115: 79, # 's' + 116: 84, # 't' + 117: 104, # 'u' + 118: 105, # 'v' + 119: 97, # 'w' + 120: 98, # 'x' + 121: 92, # 'y' + 122: 203, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 209, # '\x80' + 129: 210, # '\x81' + 130: 211, # '\x82' + 131: 212, # '\x83' + 132: 213, # '\x84' + 133: 88, # '\x85' + 134: 214, # '\x86' + 135: 215, # '\x87' + 136: 216, # '\x88' + 137: 217, # '\x89' + 138: 218, # '\x8a' + 139: 219, # '\x8b' + 140: 220, # '\x8c' + 141: 118, # '\x8d' + 142: 221, # '\x8e' + 143: 222, # '\x8f' + 144: 223, # '\x90' + 145: 224, # '\x91' + 146: 99, # '\x92' + 147: 85, # '\x93' + 148: 83, # '\x94' + 149: 225, # '\x95' + 150: 226, # '\x96' + 151: 227, # '\x97' + 152: 228, # '\x98' + 153: 229, # '\x99' + 154: 230, # '\x9a' + 155: 231, # '\x9b' + 156: 232, # '\x9c' + 157: 233, # '\x9d' + 158: 234, # '\x9e' + 159: 235, # '\x9f' + 160: 236, # None + 161: 5, # 'ก' + 162: 30, # 'ข' + 163: 237, # 'ฃ' + 164: 24, # 'ค' + 165: 238, # 'ฅ' + 166: 75, # 'ฆ' + 167: 8, # 'ง' + 168: 26, # 'จ' + 169: 52, # 'ฉ' + 170: 34, # 'ช' + 171: 51, # 'ซ' + 172: 119, # 'ฌ' + 173: 47, # 'ญ' + 174: 58, # 'ฎ' + 175: 57, # 'ฏ' + 176: 49, # 'ฐ' + 177: 53, # 'ฑ' + 178: 55, # 'ฒ' + 179: 43, # 'ณ' + 180: 20, # 'ด' + 181: 19, # 'ต' + 182: 44, # 'ถ' + 183: 14, # 'ท' + 184: 48, # 'ธ' + 185: 3, # 'น' + 186: 17, # 'บ' + 187: 25, # 'ป' + 188: 39, # 'ผ' + 189: 62, # 'ฝ' + 190: 31, # 'พ' + 191: 54, # 'ฟ' + 192: 45, # 'ภ' + 193: 9, # 'ม' + 194: 16, # 'ย' + 195: 2, # 'ร' + 196: 61, # 'ฤ' + 197: 15, # 'ล' + 198: 239, # 'ฦ' + 199: 12, # 'ว' + 200: 42, # 'ศ' + 201: 46, # 'ษ' + 202: 18, # 'ส' + 203: 21, # 'ห' + 204: 76, # 'ฬ' + 205: 4, # 'อ' + 206: 66, # 'ฮ' + 207: 63, # 'ฯ' + 208: 22, # 'ะ' + 209: 10, # 'ั' + 210: 1, # 'า' + 211: 36, # 'ำ' + 212: 23, # 'ิ' + 213: 13, # 'ี' + 214: 40, # 'ึ' + 215: 27, # 'ื' + 216: 32, # 'ุ' + 217: 35, # 'ู' + 218: 86, # 'ฺ' + 219: 240, # None + 220: 241, # None + 221: 242, # None + 222: 243, # None + 223: 244, # '฿' + 224: 11, # 'เ' + 225: 28, # 'แ' + 226: 41, # 'โ' + 227: 29, # 'ใ' + 228: 33, # 'ไ' + 229: 245, # 'ๅ' + 230: 50, # 'ๆ' + 231: 37, # '็' + 232: 6, # '่' + 233: 7, # '้' + 234: 67, # '๊' + 235: 77, # '๋' + 236: 38, # '์' + 237: 93, # 'ํ' + 238: 246, # '๎' + 239: 247, # '๏' + 240: 68, # '๐' + 241: 56, # '๑' + 242: 59, # '๒' + 243: 65, # '๓' + 244: 69, # '๔' + 245: 60, # '๕' + 246: 70, # '๖' + 247: 80, # '๗' + 248: 71, # '๘' + 249: 87, # '๙' + 250: 248, # '๚' + 251: 249, # '๛' + 252: 250, # None + 253: 251, # None + 254: 252, # None + 255: 253, # None +} + +TIS_620_THAI_MODEL = SingleByteCharSetModel( + charset_name="TIS-620", + language="Thai", + 
char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER, + language_model=THAI_LANG_MODEL, + typical_positive_ratio=0.926386, + keep_ascii_letters=False, + alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py new file mode 100644 index 0000000..291857c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/langturkishmodel.py @@ -0,0 +1,4380 @@ +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +TURKISH_LANG_MODEL = { + 23: { # 'A' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 37: { # 'B' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 47: { # 'C' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 
'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 39: { # 'D' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 29: { # 'E' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 52: { # 'F' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 36: { # 'G' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' 
+ 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 45: { # 'H' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 2, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 2, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 53: { # 'I' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 60: { # 'J' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 16: { # 'K' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 
'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 49: { # 'L' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 2, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 20: { # 'M' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 46: { # 'N' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 42: { # 'O' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' 
+ 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 48: { # 'P' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 44: { # 'R' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 35: { # 'S' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 
0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 31: { # 'T' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 51: { # 'U' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 38: { # 'V' + 23: 1, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 62: { # 'W' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 
0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 43: { # 'Y' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 56: { # 'Z' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 1: { # 'a' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 21: { # 'b' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 
0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 28: { # 'c' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 3, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 1, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 12: { # 'd' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 2: { # 'e' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 18: { # 
'f' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 1, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 27: { # 'g' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 25: { # 'h' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 3: { # 'i' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 
58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 24: { # 'j' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 10: { # 'k' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 5: { # 'l' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 13: { # 'm' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' 
+ 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 4: { # 'n' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 15: { # 'o' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 2, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 2, # 'ş' + }, + 26: { # 'p' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 7: { # 'r' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 
3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 8: { # 's' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 9: { # 't' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 14: { # 'u' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' 
+ 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 32: { # 'v' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 57: { # 'w' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 1, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 58: { # 'x' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 11: { # 'y' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 
26: 1, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 22: { # 'z' + 23: 2, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 2, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 3, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 2, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 63: { # '·' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 54: { # 'Ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 50: { # 'Ö' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 
62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 55: { # 'Ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 59: { # 'â' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 33: { # 'ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 61: { # 'î' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 
0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 34: { # 'ö' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 3, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 17: { # 'ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 30: { # 'ğ' + 23: 0, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # 
'·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 41: { # 'İ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 6: { # 'ı' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 40: { # 'Ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 2, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 3, # 'f' + 27: 0, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 1, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 19: { # 'ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 
'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 255, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 255, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 255, # ' ' + 33: 255, # '!' + 34: 255, # '"' + 35: 255, # '#' + 36: 255, # '$' + 37: 255, # '%' + 38: 255, # '&' + 39: 255, # "'" + 40: 255, # '(' + 41: 255, # ')' + 42: 255, # '*' + 43: 255, # '+' + 44: 255, # ',' + 45: 255, # '-' + 46: 255, # '.' + 47: 255, # '/' + 48: 255, # '0' + 49: 255, # '1' + 50: 255, # '2' + 51: 255, # '3' + 52: 255, # '4' + 53: 255, # '5' + 54: 255, # '6' + 55: 255, # '7' + 56: 255, # '8' + 57: 255, # '9' + 58: 255, # ':' + 59: 255, # ';' + 60: 255, # '<' + 61: 255, # '=' + 62: 255, # '>' + 63: 255, # '?' 
+ 64: 255, # '@' + 65: 23, # 'A' + 66: 37, # 'B' + 67: 47, # 'C' + 68: 39, # 'D' + 69: 29, # 'E' + 70: 52, # 'F' + 71: 36, # 'G' + 72: 45, # 'H' + 73: 53, # 'I' + 74: 60, # 'J' + 75: 16, # 'K' + 76: 49, # 'L' + 77: 20, # 'M' + 78: 46, # 'N' + 79: 42, # 'O' + 80: 48, # 'P' + 81: 69, # 'Q' + 82: 44, # 'R' + 83: 35, # 'S' + 84: 31, # 'T' + 85: 51, # 'U' + 86: 38, # 'V' + 87: 62, # 'W' + 88: 65, # 'X' + 89: 43, # 'Y' + 90: 56, # 'Z' + 91: 255, # '[' + 92: 255, # '\\' + 93: 255, # ']' + 94: 255, # '^' + 95: 255, # '_' + 96: 255, # '`' + 97: 1, # 'a' + 98: 21, # 'b' + 99: 28, # 'c' + 100: 12, # 'd' + 101: 2, # 'e' + 102: 18, # 'f' + 103: 27, # 'g' + 104: 25, # 'h' + 105: 3, # 'i' + 106: 24, # 'j' + 107: 10, # 'k' + 108: 5, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 15, # 'o' + 112: 26, # 'p' + 113: 64, # 'q' + 114: 7, # 'r' + 115: 8, # 's' + 116: 9, # 't' + 117: 14, # 'u' + 118: 32, # 'v' + 119: 57, # 'w' + 120: 58, # 'x' + 121: 11, # 'y' + 122: 22, # 'z' + 123: 255, # '{' + 124: 255, # '|' + 125: 255, # '}' + 126: 255, # '~' + 127: 255, # '\x7f' + 128: 180, # '\x80' + 129: 179, # '\x81' + 130: 178, # '\x82' + 131: 177, # '\x83' + 132: 176, # '\x84' + 133: 175, # '\x85' + 134: 174, # '\x86' + 135: 173, # '\x87' + 136: 172, # '\x88' + 137: 171, # '\x89' + 138: 170, # '\x8a' + 139: 169, # '\x8b' + 140: 168, # '\x8c' + 141: 167, # '\x8d' + 142: 166, # '\x8e' + 143: 165, # '\x8f' + 144: 164, # '\x90' + 145: 163, # '\x91' + 146: 162, # '\x92' + 147: 161, # '\x93' + 148: 160, # '\x94' + 149: 159, # '\x95' + 150: 101, # '\x96' + 151: 158, # '\x97' + 152: 157, # '\x98' + 153: 156, # '\x99' + 154: 155, # '\x9a' + 155: 154, # '\x9b' + 156: 153, # '\x9c' + 157: 152, # '\x9d' + 158: 151, # '\x9e' + 159: 106, # '\x9f' + 160: 150, # '\xa0' + 161: 149, # '¡' + 162: 148, # '¢' + 163: 147, # '£' + 164: 146, # '¤' + 165: 145, # '¥' + 166: 144, # '¦' + 167: 100, # '§' + 168: 143, # '¨' + 169: 142, # '©' + 170: 141, # 'ª' + 171: 140, # '«' + 172: 139, # '¬' + 173: 138, # '\xad' + 174: 137, # '®' + 175: 136, # '¯' + 176: 94, # '°' + 177: 80, # '±' + 178: 93, # '²' + 179: 135, # '³' + 180: 105, # '´' + 181: 134, # 'µ' + 182: 133, # '¶' + 183: 63, # '·' + 184: 132, # '¸' + 185: 131, # '¹' + 186: 130, # 'º' + 187: 129, # '»' + 188: 128, # '¼' + 189: 127, # '½' + 190: 126, # '¾' + 191: 125, # '¿' + 192: 124, # 'À' + 193: 104, # 'Á' + 194: 73, # 'Â' + 195: 99, # 'Ã' + 196: 79, # 'Ä' + 197: 85, # 'Å' + 198: 123, # 'Æ' + 199: 54, # 'Ç' + 200: 122, # 'È' + 201: 98, # 'É' + 202: 92, # 'Ê' + 203: 121, # 'Ë' + 204: 120, # 'Ì' + 205: 91, # 'Í' + 206: 103, # 'Î' + 207: 119, # 'Ï' + 208: 68, # 'Ğ' + 209: 118, # 'Ñ' + 210: 117, # 'Ò' + 211: 97, # 'Ó' + 212: 116, # 'Ô' + 213: 115, # 'Õ' + 214: 50, # 'Ö' + 215: 90, # '×' + 216: 114, # 'Ø' + 217: 113, # 'Ù' + 218: 112, # 'Ú' + 219: 111, # 'Û' + 220: 55, # 'Ü' + 221: 41, # 'İ' + 222: 40, # 'Ş' + 223: 86, # 'ß' + 224: 89, # 'à' + 225: 70, # 'á' + 226: 59, # 'â' + 227: 78, # 'ã' + 228: 71, # 'ä' + 229: 82, # 'å' + 230: 88, # 'æ' + 231: 33, # 'ç' + 232: 77, # 'è' + 233: 66, # 'é' + 234: 84, # 'ê' + 235: 83, # 'ë' + 236: 110, # 'ì' + 237: 75, # 'í' + 238: 61, # 'î' + 239: 96, # 'ï' + 240: 30, # 'ğ' + 241: 67, # 'ñ' + 242: 109, # 'ò' + 243: 74, # 'ó' + 244: 87, # 'ô' + 245: 102, # 'õ' + 246: 34, # 'ö' + 247: 95, # '÷' + 248: 81, # 'ø' + 249: 108, # 'ù' + 250: 76, # 'ú' + 251: 72, # 'û' + 252: 17, # 'ü' + 253: 6, # 'ı' + 254: 19, # 'ş' + 255: 107, # 'ÿ' +} + +ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel( + charset_name="ISO-8859-9", + language="Turkish", + 
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, + language_model=TURKISH_LANG_MODEL, + typical_positive_ratio=0.97029, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş", +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py new file mode 100644 index 0000000..59a01d9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/latin1prober.py @@ -0,0 +1,147 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Union + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +# fmt: off +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # 
C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) +# fmt: on + + +class Latin1Prober(CharSetProber): + def __init__(self) -> None: + super().__init__() + self._last_char_class = OTH + self._freq_counter: List[int] = [] + self.reset() + + def reset(self) -> None: + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + super().reset() + + @property + def charset_name(self) -> str: + return "ISO-8859-1" + + @property + def language(self) -> str: + return "" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + byte_str = self.remove_xml_tags(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self) -> float: + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + confidence = ( + 0.0 + if total < 0.01 + else (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total + ) + confidence = max(confidence, 0.0) + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence *= 0.73 + return confidence diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py new file mode 100644 index 0000000..1425d10 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/macromanprober.py @@ -0,0 +1,162 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This code was modified from latin1prober.py by Rob Speer . +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Rob Speer - adapt to MacRoman encoding +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
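Latin1Prober's feed() above is a pure table walk: each byte is mapped to one of CLASS_NUM character classes, and the pair (previous class, current class) indexes the flattened Latin1ClassModel. A 0 entry vetoes ISO-8859-1 outright, while hits in categories 1-3 are tallied for get_confidence(). A minimal self-contained sketch of that indexing, using hypothetical 3-class tables in place of the real 8-class ones:

# Sketch of the flattened-bigram lookup in Latin1Prober.feed().
# CHAR_TO_CLASS and CLASS_MODEL are hypothetical 3-class stand-ins
# for the real Latin1_CharToClass / Latin1ClassModel tables.
CLASS_NUM = 3
CHAR_TO_CLASS = {ord("a"): 1, ord("b"): 2}  # any other byte -> class 0 ("illegal")
CLASS_MODEL = (
    # next:   0  1  2   (rows = previous class)
    0, 0, 0,  # 0
    0, 3, 1,  # 1
    0, 2, 3,  # 2
)

def tally(byte_str: bytes):
    freq_counter = [0] * 4  # hit counts for categories 0..3
    last_class = 1          # the probers start from OTH; class 1 plays that role here
    for c in byte_str:
        char_class = CHAR_TO_CLASS.get(c, 0)
        freq = CLASS_MODEL[last_class * CLASS_NUM + char_class]
        if freq == 0:       # illegal bigram: this cannot be the encoding
            return None
        freq_counter[freq] += 1
        last_class = char_class
    return freq_counter

print(tally(b"abab"))  # -> [0, 2, 1, 1]

The real prober also strips XML tags before scoring and multiplies the final ratio by 0.73, deliberately handicapping Latin-1 so that more specific probers win ties.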
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Union + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +ODD = 8 # character that is unlikely to appear +CLASS_NUM = 9 # total classes + +# The change from Latin1 is that we explicitly look for extended characters +# that are infrequently-occurring symbols, and consider them to always be +# improbable. This should let MacRoman get out of the way of more likely +# encodings in most situations. + +# fmt: off +MacRoman_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + ACV, ACV, ACO, ACV, ACO, ACV, ACV, ASV, # 80 - 87 + ASV, ASV, ASV, ASV, ASV, ASO, ASV, ASV, # 88 - 8F + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASV, # 90 - 97 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, ASO, # A0 - A7 + OTH, OTH, ODD, ODD, OTH, OTH, ACV, ACV, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, ASV, ASV, # B8 - BF + OTH, OTH, ODD, OTH, ODD, OTH, OTH, OTH, # C0 - C7 + OTH, OTH, OTH, ACV, ACV, ACV, ACV, ASV, # C8 - CF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, ODD, # D0 - D7 + ASV, ACV, ODD, OTH, OTH, OTH, OTH, OTH, # D8 - DF + OTH, OTH, OTH, OTH, OTH, ACV, ACV, ACV, # E0 - E7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # E8 - EF + ODD, ACV, ACV, ACV, ACV, ASV, ODD, ODD, # F0 - F7 + ODD, ODD, ODD, ODD, ODD, ODD, ODD, ODD, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +MacRomanClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO ODD + 0, 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, 1, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, 1, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, 1, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, 1, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, 1, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, 1, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, 1, # ASO + 0, 1, 1, 1, 1, 1, 1, 1, 1, # ODD +) +# fmt: on + + +class MacRomanProber(CharSetProber): + def __init__(self) -> None: + super().__init__() + self._last_char_class = OTH + self._freq_counter: List[int] = [] + self.reset() + + def reset(self) -> None: + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + + # express the prior that MacRoman is a somewhat rare encoding; + # this can be done by starting out in a slightly 
improbable state + # that must be overcome + self._freq_counter[2] = 10 + + super().reset() + + @property + def charset_name(self) -> str: + return "MacRoman" + + @property + def language(self) -> str: + return "" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + byte_str = self.remove_xml_tags(byte_str) + for c in byte_str: + char_class = MacRoman_CharToClass[c] + freq = MacRomanClassModel[(self._last_char_class * CLASS_NUM) + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self) -> float: + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + confidence = ( + 0.0 + if total < 0.01 + else (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total + ) + confidence = max(confidence, 0.0) + # lower the confidence of MacRoman so that other more accurate + # detector can take priority. + confidence *= 0.73 + return confidence diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py new file mode 100644 index 0000000..666307e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcharsetprober.py @@ -0,0 +1,95 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
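One detail worth pausing on: MacRomanProber.reset() seeds _freq_counter[2] with 10 before any input arrives. Because get_confidence() computes (counter[3] - 20 * counter[1]) / total, pre-loading the "normal" bucket inflates the denominator, which encodes the prior that MacRoman is rare and must accumulate extra evidence to outscore other candidates. A quick numeric check of that effect, restating the formula from the class above:

# Effect of the MacRoman prior (_freq_counter[2] = 10) on confidence.
def confidence(counter):
    total = sum(counter)
    if total < 0.01:
        return 0.0
    raw = (counter[3] - counter[1] * 20.0) / total
    return max(raw, 0.0) * 0.73  # same 0.73 handicap the prober applies

print(confidence([0, 0, 0, 5]))   # no prior: 0.73
print(confidence([0, 0, 10, 5]))  # with prior: ~0.24, so the prior must be overcome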
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Optional, Union + +from .chardistribution import CharDistributionAnalysis +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, MachineState, ProbingState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + super().__init__(lang_filter=lang_filter) + self.distribution_analyzer: Optional[CharDistributionAnalysis] = None + self.coding_sm: Optional[CodingStateMachine] = None + self._last_char = bytearray(b"\0\0") + + def reset(self) -> None: + super().reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = bytearray(b"\0\0") + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None + + for i, byte in enumerate(byte_str): + coding_state = self.coding_sm.next_state(byte) + if coding_state == MachineState.ERROR: + self.logger.debug( + "%s %s prober hit error at byte %s", + self.charset_name, + self.language, + i, + ) + self._state = ProbingState.NOT_ME + break + if coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + if coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if self.distribution_analyzer.got_enough_data() and ( + self.get_confidence() > self.SHORTCUT_THRESHOLD + ): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self) -> float: + assert self.distribution_analyzer is not None + return self.distribution_analyzer.get_confidence() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..6cb9cc7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcsgroupprober.py @@ -0,0 +1,57 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .big5prober import Big5Prober +from .charsetgroupprober import CharSetGroupProber +from .cp949prober import CP949Prober +from .enums import LanguageFilter +from .eucjpprober import EUCJPProber +from .euckrprober import EUCKRProber +from .euctwprober import EUCTWProber +from .gb2312prober import GB2312Prober +from .johabprober import JOHABProber +from .sjisprober import SJISProber +from .utf8prober import UTF8Prober + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + super().__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber(), + JOHABProber(), + ] + self.reset() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py new file mode 100644 index 0000000..7bbe97e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/mbcssm.py @@ -0,0 +1,661 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
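MBCSGroupProber adds no detection logic of its own: it fans input out to one prober per multi-byte encoding and reports the best scorer. Assuming the standalone chardet distribution (which this vendored copy tracks), the end-to-end entry point is a one-liner:

# Exercising the detector these probers implement (assumes the standalone
# `chardet` package, which mirrors pip's vendored copy).
import chardet

raw = "これは日本語のテキストです".encode("euc_jp")
print(chardet.detect(raw))  # e.g. {'encoding': 'EUC-JP', 'confidence': ..., 'language': 'Japanese'}

The mbcssm.py tables that follow all share one consumption pattern: chardet's CodingStateMachine maps each byte to a class via class_table, takes state_table[current_state * class_factor + class] as the next state, and uses char_len_table to record how long the current sequence should be. A toy model showing that stepping rule (hypothetical tables; START=0, ERROR=1, ITS_ME=2 match chardet's MachineState values):

# Toy illustration of the stepping rule behind models like BIG5_SM_MODEL.
START, ERROR, ITS_ME = 0, 1, 2  # mirrors chardet's MachineState

TOY_MODEL = {
    "class_table": {0x41: 1, 0xA1: 2},  # byte -> class; anything else is class 0
    "class_factor": 3,
    "state_table": (
        # class:  0       1       2
        ERROR,  START,  3,        # from START: class 2 opens a two-byte char
        ERROR,  ERROR,  ERROR,    # from ERROR: sticky failure
        ITS_ME, ITS_ME, ITS_ME,   # from ITS_ME: sticky success
        ERROR,  START,  START,    # from state 3: accept the trail byte
    ),
    "char_len_table": (0, 1, 2),
}

def step(state: int, byte: int) -> int:
    cls = TOY_MODEL["class_table"].get(byte, 0)
    return TOY_MODEL["state_table"][state * TOY_MODEL["class_factor"] + cls]

state = START
for b in b"\xa1\xa1A":  # lead byte, trail byte, then ASCII
    state = step(state, b)
print(state == START)   # True: the whole sequence was legal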
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .codingstatemachinedict import CodingStateMachineDict +from .enums import MachineState + +# BIG5 + +# fmt: off +BIG5_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, # 00 - 07 #allow 0x00 as legal value + 1, 1, 1, 1, 1, 1, 0, 0, # 08 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, # 10 - 17 + 1, 1, 1, 0, 1, 1, 1, 1, # 18 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 27 + 1, 1, 1, 1, 1, 1, 1, 1, # 28 - 2f + 1, 1, 1, 1, 1, 1, 1, 1, # 30 - 37 + 1, 1, 1, 1, 1, 1, 1, 1, # 38 - 3f + 2, 2, 2, 2, 2, 2, 2, 2, # 40 - 47 + 2, 2, 2, 2, 2, 2, 2, 2, # 48 - 4f + 2, 2, 2, 2, 2, 2, 2, 2, # 50 - 57 + 2, 2, 2, 2, 2, 2, 2, 2, # 58 - 5f + 2, 2, 2, 2, 2, 2, 2, 2, # 60 - 67 + 2, 2, 2, 2, 2, 2, 2, 2, # 68 - 6f + 2, 2, 2, 2, 2, 2, 2, 2, # 70 - 77 + 2, 2, 2, 2, 2, 2, 2, 1, # 78 - 7f + 4, 4, 4, 4, 4, 4, 4, 4, # 80 - 87 + 4, 4, 4, 4, 4, 4, 4, 4, # 88 - 8f + 4, 4, 4, 4, 4, 4, 4, 4, # 90 - 97 + 4, 4, 4, 4, 4, 4, 4, 4, # 98 - 9f + 4, 3, 3, 3, 3, 3, 3, 3, # a0 - a7 + 3, 3, 3, 3, 3, 3, 3, 3, # a8 - af + 3, 3, 3, 3, 3, 3, 3, 3, # b0 - b7 + 3, 3, 3, 3, 3, 3, 3, 3, # b8 - bf + 3, 3, 3, 3, 3, 3, 3, 3, # c0 - c7 + 3, 3, 3, 3, 3, 3, 3, 3, # c8 - cf + 3, 3, 3, 3, 3, 3, 3, 3, # d0 - d7 + 3, 3, 3, 3, 3, 3, 3, 3, # d8 - df + 3, 3, 3, 3, 3, 3, 3, 3, # e0 - e7 + 3, 3, 3, 3, 3, 3, 3, 3, # e8 - ef + 3, 3, 3, 3, 3, 3, 3, 3, # f0 - f7 + 3, 3, 3, 3, 3, 3, 3, 0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) +# fmt: on + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL: CodingStateMachineDict = { + "class_table": BIG5_CLS, + "class_factor": 5, + "state_table": BIG5_ST, + "char_len_table": BIG5_CHAR_LEN_TABLE, + "name": "Big5", +} + +# CP949 +# fmt: off +CP949_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, # 00 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, # 10 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 2f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # 30 - 3f + 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, # 40 - 4f + 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1, # 50 - 5f + 1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, # 60 - 6f + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1, # 70 - 7f + 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, # 80 - 8f + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, # 90 - 9f + 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, # a0 - af + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # b0 - bf + 7, 7, 7, 7, 7, 7, 9, 2, 2, 3, 2, 2, 2, 2, 2, 2, # c0 - cf + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # d0 - df + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # e0 - ef + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) +# fmt: on + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL: CodingStateMachineDict = { + "class_table": CP949_CLS, + "class_factor": 10, + "state_table": CP949_ST, + "char_len_table": CP949_CHAR_LEN_TABLE, + "name": "CP949", +} + +# EUC-JP +# fmt: off +EUCJP_CLS = ( + 4, 4, 4, 4, 4, 4, 4, 4, # 00 - 07 + 4, 4, 4, 4, 4, 4, 5, 5, # 08 - 0f + 4, 4, 4, 4, 4, 4, 4, 4, # 10 - 17 + 4, 4, 4, 5, 4, 4, 4, 4, # 18 - 1f + 4, 4, 4, 4, 4, 4, 4, 4, # 20 - 27 + 4, 4, 4, 4, 4, 4, 4, 4, # 28 - 2f + 4, 4, 4, 4, 4, 4, 4, 4, # 30 - 37 + 4, 4, 4, 4, 4, 4, 4, 4, # 38 - 3f + 4, 4, 4, 4, 4, 4, 4, 4, # 40 - 47 + 4, 4, 4, 4, 4, 4, 4, 4, # 48 - 4f + 4, 4, 4, 4, 4, 4, 4, 4, # 50 - 57 + 4, 4, 4, 4, 4, 4, 4, 4, # 58 - 5f + 4, 4, 4, 4, 4, 4, 4, 4, # 60 - 67 + 4, 4, 4, 4, 4, 4, 4, 4, # 68 - 6f + 4, 4, 4, 4, 4, 4, 4, 4, # 70 - 77 + 4, 4, 4, 4, 4, 4, 4, 4, # 78 - 7f + 5, 5, 5, 5, 5, 5, 5, 5, # 80 - 87 + 5, 5, 5, 5, 5, 5, 1, 3, # 88 - 8f + 5, 5, 5, 5, 5, 5, 5, 5, # 90 - 97 + 5, 5, 5, 5, 5, 5, 5, 5, # 98 - 9f + 5, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 2, 2, 2, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 2, 2, 2, 2, # c0 - c7 + 2, 2, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 0, 0, 0, 0, 0, 0, 0, 0, # e0 - e7 + 0, 0, 0, 0, 0, 0, 0, 0, # e8 - ef + 0, 0, 0, 0, 0, 0, 0, 0, # f0 - f7 + 0, 0, 0, 0, 0, 0, 0, 5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) +# fmt: on + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL: CodingStateMachineDict = { + "class_table": EUCJP_CLS, + "class_factor": 
6, + "state_table": EUCJP_ST, + "char_len_table": EUCJP_CHAR_LEN_TABLE, + "name": "EUC-JP", +} + +# EUC-KR +# fmt: off +EUCKR_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, # 00 - 07 + 1, 1, 1, 1, 1, 1, 0, 0, # 08 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, # 10 - 17 + 1, 1, 1, 0, 1, 1, 1, 1, # 18 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 27 + 1, 1, 1, 1, 1, 1, 1, 1, # 28 - 2f + 1, 1, 1, 1, 1, 1, 1, 1, # 30 - 37 + 1, 1, 1, 1, 1, 1, 1, 1, # 38 - 3f + 1, 1, 1, 1, 1, 1, 1, 1, # 40 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, # 48 - 4f + 1, 1, 1, 1, 1, 1, 1, 1, # 50 - 57 + 1, 1, 1, 1, 1, 1, 1, 1, # 58 - 5f + 1, 1, 1, 1, 1, 1, 1, 1, # 60 - 67 + 1, 1, 1, 1, 1, 1, 1, 1, # 68 - 6f + 1, 1, 1, 1, 1, 1, 1, 1, # 70 - 77 + 1, 1, 1, 1, 1, 1, 1, 1, # 78 - 7f + 0, 0, 0, 0, 0, 0, 0, 0, # 80 - 87 + 0, 0, 0, 0, 0, 0, 0, 0, # 88 - 8f + 0, 0, 0, 0, 0, 0, 0, 0, # 90 - 97 + 0, 0, 0, 0, 0, 0, 0, 0, # 98 - 9f + 0, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 3, 3, 3, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 2, 2, 2, 2, # c0 - c7 + 2, 3, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 2, 2, 2, 2, 2, 2, 2, 2, # e0 - e7 + 2, 2, 2, 2, 2, 2, 2, 2, # e8 - ef + 2, 2, 2, 2, 2, 2, 2, 2, # f0 - f7 + 2, 2, 2, 2, 2, 2, 2, 0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) +# fmt: on + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL: CodingStateMachineDict = { + "class_table": EUCKR_CLS, + "class_factor": 4, + "state_table": EUCKR_ST, + "char_len_table": EUCKR_CHAR_LEN_TABLE, + "name": "EUC-KR", +} + +# JOHAB +# fmt: off +JOHAB_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,0,0, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,0,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,3,3,3,3,3,3,3, # 30 - 37 + 3,3,3,3,3,3,3,3, # 38 - 3f + 3,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,2, # 78 - 7f + 6,6,6,6,8,8,8,8, # 80 - 87 + 8,8,8,8,8,8,8,8, # 88 - 8f + 8,7,7,7,7,7,7,7, # 90 - 97 + 7,7,7,7,7,7,7,7, # 98 - 9f + 7,7,7,7,7,7,7,7, # a0 - a7 + 7,7,7,7,7,7,7,7, # a8 - af + 7,7,7,7,7,7,7,7, # b0 - b7 + 7,7,7,7,7,7,7,7, # b8 - bf + 7,7,7,7,7,7,7,7, # c0 - c7 + 7,7,7,7,7,7,7,7, # c8 - cf + 7,7,7,7,5,5,5,5, # d0 - d7 + 5,9,9,9,9,9,9,5, # d8 - df + 9,9,9,9,9,9,9,9, # e0 - e7 + 9,9,9,9,9,9,9,9, # e8 - ef + 9,9,9,9,9,9,9,9, # f0 - f7 + 9,9,5,5,5,5,5,0 # f8 - ff +) + +JOHAB_ST = ( +# cls = 0 1 2 3 4 5 6 7 8 9 + MachineState.ERROR ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.ERROR ,MachineState.ERROR ,3 ,3 ,4 , # MachineState.START + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR ,MachineState.ERROR , # MachineState.ERROR + MachineState.ERROR ,MachineState.START ,MachineState.START 
,MachineState.ERROR ,MachineState.ERROR ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START ,MachineState.START , # 3 + MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START ,MachineState.ERROR ,MachineState.START , # 4 +) +# fmt: on + +JOHAB_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 0, 0, 2, 2, 2) + +JOHAB_SM_MODEL: CodingStateMachineDict = { + "class_table": JOHAB_CLS, + "class_factor": 10, + "state_table": JOHAB_ST, + "char_len_table": JOHAB_CHAR_LEN_TABLE, + "name": "Johab", +} + +# EUC-TW +# fmt: off +EUCTW_CLS = ( + 2, 2, 2, 2, 2, 2, 2, 2, # 00 - 07 + 2, 2, 2, 2, 2, 2, 0, 0, # 08 - 0f + 2, 2, 2, 2, 2, 2, 2, 2, # 10 - 17 + 2, 2, 2, 0, 2, 2, 2, 2, # 18 - 1f + 2, 2, 2, 2, 2, 2, 2, 2, # 20 - 27 + 2, 2, 2, 2, 2, 2, 2, 2, # 28 - 2f + 2, 2, 2, 2, 2, 2, 2, 2, # 30 - 37 + 2, 2, 2, 2, 2, 2, 2, 2, # 38 - 3f + 2, 2, 2, 2, 2, 2, 2, 2, # 40 - 47 + 2, 2, 2, 2, 2, 2, 2, 2, # 48 - 4f + 2, 2, 2, 2, 2, 2, 2, 2, # 50 - 57 + 2, 2, 2, 2, 2, 2, 2, 2, # 58 - 5f + 2, 2, 2, 2, 2, 2, 2, 2, # 60 - 67 + 2, 2, 2, 2, 2, 2, 2, 2, # 68 - 6f + 2, 2, 2, 2, 2, 2, 2, 2, # 70 - 77 + 2, 2, 2, 2, 2, 2, 2, 2, # 78 - 7f + 0, 0, 0, 0, 0, 0, 0, 0, # 80 - 87 + 0, 0, 0, 0, 0, 0, 6, 0, # 88 - 8f + 0, 0, 0, 0, 0, 0, 0, 0, # 90 - 97 + 0, 0, 0, 0, 0, 0, 0, 0, # 98 - 9f + 0, 3, 4, 4, 4, 4, 4, 4, # a0 - a7 + 5, 5, 1, 1, 1, 1, 1, 1, # a8 - af + 1, 1, 1, 1, 1, 1, 1, 1, # b0 - b7 + 1, 1, 1, 1, 1, 1, 1, 1, # b8 - bf + 1, 1, 3, 1, 3, 3, 3, 3, # c0 - c7 + 3, 3, 3, 3, 3, 3, 3, 3, # c8 - cf + 3, 3, 3, 3, 3, 3, 3, 3, # d0 - d7 + 3, 3, 3, 3, 3, 3, 3, 3, # d8 - df + 3, 3, 3, 3, 3, 3, 3, 3, # e0 - e7 + 3, 3, 3, 3, 3, 3, 3, 3, # e8 - ef + 3, 3, 3, 3, 3, 3, 3, 3, # f0 - f7 + 3, 3, 3, 3, 3, 3, 3, 0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) +# fmt: on + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL: CodingStateMachineDict = { + "class_table": EUCTW_CLS, + "class_factor": 7, + "state_table": EUCTW_ST, + "char_len_table": EUCTW_CHAR_LEN_TABLE, + "name": "x-euc-tw", +} + +# GB2312 +# fmt: off +GB2312_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, # 00 - 07 + 1, 1, 1, 1, 1, 1, 0, 0, # 08 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, # 10 - 17 + 1, 1, 1, 0, 1, 1, 1, 1, # 18 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 27 + 1, 1, 1, 1, 1, 1, 1, 1, # 28 - 2f + 3, 3, 3, 3, 3, 3, 3, 3, # 30 - 37 + 3, 3, 1, 1, 1, 1, 1, 1, # 38 - 3f + 2, 2, 2, 2, 2, 2, 2, 2, # 40 - 47 + 2, 2, 2, 2, 2, 2, 2, 2, # 48 - 4f + 2, 2, 2, 2, 2, 2, 2, 2, # 50 - 57 + 2, 2, 2, 2, 2, 2, 2, 2, # 58 - 5f + 2, 2, 2, 2, 2, 2, 2, 2, # 60 - 67 + 2, 2, 2, 2, 2, 2, 2, 2, # 68 - 6f + 2, 2, 2, 2, 2, 2, 2, 2, # 70 - 77 + 2, 2, 2, 
2, 2, 2, 2, 4, # 78 - 7f + 5, 6, 6, 6, 6, 6, 6, 6, # 80 - 87 + 6, 6, 6, 6, 6, 6, 6, 6, # 88 - 8f + 6, 6, 6, 6, 6, 6, 6, 6, # 90 - 97 + 6, 6, 6, 6, 6, 6, 6, 6, # 98 - 9f + 6, 6, 6, 6, 6, 6, 6, 6, # a0 - a7 + 6, 6, 6, 6, 6, 6, 6, 6, # a8 - af + 6, 6, 6, 6, 6, 6, 6, 6, # b0 - b7 + 6, 6, 6, 6, 6, 6, 6, 6, # b8 - bf + 6, 6, 6, 6, 6, 6, 6, 6, # c0 - c7 + 6, 6, 6, 6, 6, 6, 6, 6, # c8 - cf + 6, 6, 6, 6, 6, 6, 6, 6, # d0 - d7 + 6, 6, 6, 6, 6, 6, 6, 6, # d8 - df + 6, 6, 6, 6, 6, 6, 6, 6, # e0 - e7 + 6, 6, 6, 6, 6, 6, 6, 6, # e8 - ef + 6, 6, 6, 6, 6, 6, 6, 6, # f0 - f7 + 6, 6, 6, 6, 6, 6, 6, 0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) +# fmt: on + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL: CodingStateMachineDict = { + "class_table": GB2312_CLS, + "class_factor": 7, + "state_table": GB2312_ST, + "char_len_table": GB2312_CHAR_LEN_TABLE, + "name": "GB2312", +} + +# Shift_JIS +# fmt: off +SJIS_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, # 00 - 07 + 1, 1, 1, 1, 1, 1, 0, 0, # 08 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, # 10 - 17 + 1, 1, 1, 0, 1, 1, 1, 1, # 18 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 27 + 1, 1, 1, 1, 1, 1, 1, 1, # 28 - 2f + 1, 1, 1, 1, 1, 1, 1, 1, # 30 - 37 + 1, 1, 1, 1, 1, 1, 1, 1, # 38 - 3f + 2, 2, 2, 2, 2, 2, 2, 2, # 40 - 47 + 2, 2, 2, 2, 2, 2, 2, 2, # 48 - 4f + 2, 2, 2, 2, 2, 2, 2, 2, # 50 - 57 + 2, 2, 2, 2, 2, 2, 2, 2, # 58 - 5f + 2, 2, 2, 2, 2, 2, 2, 2, # 60 - 67 + 2, 2, 2, 2, 2, 2, 2, 2, # 68 - 6f + 2, 2, 2, 2, 2, 2, 2, 2, # 70 - 77 + 2, 2, 2, 2, 2, 2, 2, 1, # 78 - 7f + 3, 3, 3, 3, 3, 2, 2, 3, # 80 - 87 + 3, 3, 3, 3, 3, 3, 3, 3, # 88 - 8f + 3, 3, 3, 3, 3, 3, 3, 3, # 90 - 97 + 3, 3, 3, 3, 3, 3, 3, 3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2, 2, 2, 2, 2, 2, 2, 2, # a0 - a7 + 2, 2, 2, 2, 2, 2, 2, 2, # a8 - af + 2, 2, 2, 2, 2, 2, 2, 2, # b0 - b7 + 2, 2, 2, 2, 2, 2, 2, 2, # b8 - bf + 2, 2, 2, 2, 2, 2, 2, 2, # c0 - c7 + 2, 2, 2, 2, 2, 2, 2, 2, # c8 - cf + 2, 2, 2, 2, 2, 2, 2, 2, # d0 - d7 + 2, 2, 2, 2, 2, 2, 2, 2, # d8 - df + 3, 3, 3, 3, 3, 3, 3, 3, # e0 - e7 + 3, 3, 3, 3, 3, 4, 4, 4, # e8 - ef + 3, 3, 3, 3, 3, 3, 3, 3, # f0 - f7 + 3, 3, 3, 3, 3, 0, 0, 0, # f8 - ff +) + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) +# fmt: on + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL: CodingStateMachineDict = { + "class_table": SJIS_CLS, + "class_factor": 6, + "state_table": SJIS_ST, + "char_len_table": SJIS_CHAR_LEN_TABLE, + "name": "Shift_JIS", +} + +# UCS2-BE +# fmt: off +UCS2BE_CLS = ( + 0, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 1, 0, 0, 2, 0, 0, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 3, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 0, 0, 0, 0, # 20 - 27 + 0, 3, 3, 3, 3, 3, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 0, 0, 0, 0, 0, 0, 0, 0, # 40 - 47 + 0, 0, 0, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 0, 0, 0, 0, 0, # 78 - 7f + 0, 0, 0, 0, 0, 0, 0, 0, # 80 - 87 + 0, 0, 0, 0, 0, 0, 0, 0, # 88 - 8f + 0, 0, 0, 0, 0, 0, 0, 0, # 90 - 97 + 0, 0, 0, 0, 0, 0, 0, 0, # 98 - 9f + 0, 0, 0, 0, 0, 0, 0, 0, # a0 - a7 + 0, 0, 0, 0, 0, 0, 0, 0, # a8 - af + 0, 0, 0, 0, 0, 0, 0, 0, # b0 - b7 + 0, 0, 0, 0, 0, 0, 0, 0, # b8 - bf + 0, 0, 0, 0, 0, 0, 0, 0, # c0 - c7 + 0, 0, 0, 0, 0, 0, 0, 0, # c8 - cf + 0, 0, 0, 0, 0, 0, 0, 0, # d0 - d7 + 0, 0, 0, 0, 0, 0, 0, 0, # d8 - df + 0, 0, 0, 0, 0, 0, 0, 0, # e0 - e7 + 0, 0, 0, 0, 0, 0, 0, 0, # e8 - ef + 0, 0, 0, 0, 0, 0, 0, 0, # f0 - f7 + 0, 0, 0, 0, 0, 0, 4, 5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) +# fmt: on + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL: CodingStateMachineDict = { + "class_table": UCS2BE_CLS, + "class_factor": 6, + "state_table": UCS2BE_ST, + "char_len_table": UCS2BE_CHAR_LEN_TABLE, + "name": "UTF-16BE", +} + +# UCS2-LE +# fmt: off +UCS2LE_CLS = ( + 0, 0, 0, 0, 0, 0, 0, 0, # 00 - 07 + 0, 0, 1, 0, 0, 2, 0, 0, # 08 - 0f + 0, 0, 0, 0, 0, 0, 0, 0, # 10 - 17 + 0, 0, 0, 3, 0, 0, 0, 0, # 18 - 1f + 0, 0, 0, 0, 0, 0, 0, 0, # 20 - 27 + 0, 3, 3, 3, 3, 3, 0, 0, # 28 - 2f + 0, 0, 0, 0, 0, 0, 0, 0, # 30 - 37 + 0, 0, 0, 0, 0, 0, 0, 0, # 38 - 3f + 0, 0, 
0, 0, 0, 0, 0, 0, # 40 - 47 + 0, 0, 0, 0, 0, 0, 0, 0, # 48 - 4f + 0, 0, 0, 0, 0, 0, 0, 0, # 50 - 57 + 0, 0, 0, 0, 0, 0, 0, 0, # 58 - 5f + 0, 0, 0, 0, 0, 0, 0, 0, # 60 - 67 + 0, 0, 0, 0, 0, 0, 0, 0, # 68 - 6f + 0, 0, 0, 0, 0, 0, 0, 0, # 70 - 77 + 0, 0, 0, 0, 0, 0, 0, 0, # 78 - 7f + 0, 0, 0, 0, 0, 0, 0, 0, # 80 - 87 + 0, 0, 0, 0, 0, 0, 0, 0, # 88 - 8f + 0, 0, 0, 0, 0, 0, 0, 0, # 90 - 97 + 0, 0, 0, 0, 0, 0, 0, 0, # 98 - 9f + 0, 0, 0, 0, 0, 0, 0, 0, # a0 - a7 + 0, 0, 0, 0, 0, 0, 0, 0, # a8 - af + 0, 0, 0, 0, 0, 0, 0, 0, # b0 - b7 + 0, 0, 0, 0, 0, 0, 0, 0, # b8 - bf + 0, 0, 0, 0, 0, 0, 0, 0, # c0 - c7 + 0, 0, 0, 0, 0, 0, 0, 0, # c8 - cf + 0, 0, 0, 0, 0, 0, 0, 0, # d0 - d7 + 0, 0, 0, 0, 0, 0, 0, 0, # d8 - df + 0, 0, 0, 0, 0, 0, 0, 0, # e0 - e7 + 0, 0, 0, 0, 0, 0, 0, 0, # e8 - ef + 0, 0, 0, 0, 0, 0, 0, 0, # f0 - f7 + 0, 0, 0, 0, 0, 0, 4, 5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) +# fmt: on + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL: CodingStateMachineDict = { + "class_table": UCS2LE_CLS, + "class_factor": 6, + "state_table": UCS2LE_ST, + "char_len_table": UCS2LE_CHAR_LEN_TABLE, + "name": "UTF-16LE", +} + +# UTF-8 +# fmt: off +UTF8_CLS = ( + 1, 1, 1, 1, 1, 1, 1, 1, # 00 - 07 #allow 0x00 as a legal value + 1, 1, 1, 1, 1, 1, 0, 0, # 08 - 0f + 1, 1, 1, 1, 1, 1, 1, 1, # 10 - 17 + 1, 1, 1, 0, 1, 1, 1, 1, # 18 - 1f + 1, 1, 1, 1, 1, 1, 1, 1, # 20 - 27 + 1, 1, 1, 1, 1, 1, 1, 1, # 28 - 2f + 1, 1, 1, 1, 1, 1, 1, 1, # 30 - 37 + 1, 1, 1, 1, 1, 1, 1, 1, # 38 - 3f + 1, 1, 1, 1, 1, 1, 1, 1, # 40 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, # 48 - 4f + 1, 1, 1, 1, 1, 1, 1, 1, # 50 - 57 + 1, 1, 1, 1, 1, 1, 1, 1, # 58 - 5f + 1, 1, 1, 1, 1, 1, 1, 1, # 60 - 67 + 1, 1, 1, 1, 1, 1, 1, 1, # 68 - 6f + 1, 1, 1, 1, 1, 1, 1, 1, # 70 - 77 + 1, 1, 1, 1, 1, 1, 1, 1, # 78 - 7f + 2, 2, 2, 2, 3, 3, 3, 3, # 80 - 87 + 4, 4, 4, 4, 4, 4, 4, 4, # 88 - 8f + 4, 4, 4, 4, 4, 4, 4, 4, # 90 - 97 + 4, 4, 4, 4, 4, 4, 4, 4, # 98 - 9f + 5, 5, 5, 5, 5, 5, 5, 5, # a0 - a7 + 5, 5, 5, 5, 5, 5, 5, 5, # a8 - af + 5, 5, 5, 5, 5, 5, 5, 5, # b0 - b7 + 5, 5, 5, 5, 5, 5, 5, 5, # b8 - bf + 0, 0, 6, 6, 6, 6, 6, 6, # c0 - c7 + 6, 6, 6, 6, 6, 6, 6, 6, # c8 - cf + 6, 6, 6, 6, 6, 6, 6, 6, # d0 - d7 + 6, 6, 6, 6, 6, 6, 6, 6, # d8 - df + 7, 8, 8, 8, 8, 8, 8, 8, # e0 - e7 + 8, 8, 8, 8, 8, 9, 8, 8, # e8 - ef + 10, 11, 11, 11, 11, 11, 11, 11, # f0 - f7 + 12, 13, 13, 13, 14, 15, 0, 0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) +# fmt: on + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL: CodingStateMachineDict = { + "class_table": UTF8_CLS, + "class_factor": 16, + "state_table": UTF8_ST, + "char_len_table": UTF8_CHAR_LEN_TABLE, + "name": "UTF-8", +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__init__.py 
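A note on how the *_SM_MODEL tables above are consumed: chardet's CodingStateMachine maps each incoming byte to a class via class_table, then indexes the flattened state_table with (current_state * class_factor + byte_class); char_len_table records the expected length of the multi-byte character that each starting class begins. Below is a minimal, self-contained sketch of that loop — the name `scan` is ours, only UTF8_SM_MODEL comes from the diff, and in chardet's MachineState enum START, ERROR and ITS_ME are 0, 1 and 2:

START, ERROR, ITS_ME = 0, 1, 2  # values of chardet's MachineState enum

def scan(model, data):
    # Run one coding state machine over raw bytes; None means "undecided".
    state = START
    for byte in data:
        byte_class = model["class_table"][byte]
        state = model["state_table"][state * model["class_factor"] + byte_class]
        if state == ERROR:
            return False  # illegal byte sequence for this encoding
        if state == ITS_ME:
            return True   # encoding-specific signature seen
    return None

# "é" is b"\xc3\xa9" in UTF-8, a legal two-byte sequence, so ERROR is never hit:
print(scan(UTF8_SM_MODEL, "é".encode("utf-8")))  # None (still plausible UTF-8)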
b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..26629bc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc new file mode 100644 index 0000000..172fd62 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py new file mode 100644 index 0000000..eb40c5f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/metadata/languages.py @@ -0,0 +1,352 @@ +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" + +from string import ascii_letters +from typing import List, Optional + +# TODO: Add Ukrainian (KOI8-U) + + +class Language: + """Metadata about a language useful for training models + + :ivar name: The human name for the language, in English. + :type name: str + :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, + or use another catalog as a last resort. + :type iso_code: str + :ivar use_ascii: Whether or not ASCII letters should be included in trained + models. + :type use_ascii: bool + :ivar charsets: The charsets we want to support and create data for. + :type charsets: list of str + :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is + `True`, you only need to add those not in the ASCII set. + :type alphabet: str + :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling + Wikipedia for training data. + :type wiki_start_pages: list of str + """ + + def __init__( + self, + name: Optional[str] = None, + iso_code: Optional[str] = None, + use_ascii: bool = True, + charsets: Optional[List[str]] = None, + alphabet: Optional[str] = None, + wiki_start_pages: Optional[List[str]] = None, + ) -> None: + super().__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + elif not alphabet: + raise ValueError("Must supply alphabet if use_ascii is False") + self.alphabet = "".join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self) -> str: + param_str = ", ".join( + f"{k}={v!r}" for k, v in self.__dict__.items() if not k.startswith("_") + ) + return f"{self.__class__.__name__}({param_str})" + + +LANGUAGES = { + "Arabic": Language( + name="Arabic", + iso_code="ar", + use_ascii=False, + # We only support encodings that use isolated + # forms, because the current recommendation is + # that the rendering system handles presentation + # forms. 
This means we purposefully skip IBM864. + charsets=["ISO-8859-6", "WINDOWS-1256", "CP720", "CP864"], + alphabet="ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ", + wiki_start_pages=["الصفحة_الرئيسية"], + ), + "Belarusian": Language( + name="Belarusian", + iso_code="be", + use_ascii=False, + charsets=["ISO-8859-5", "WINDOWS-1251", "IBM866", "MacCyrillic"], + alphabet="АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯабвгдеёжзійклмнопрстуўфхцчшыьэюяʼ", + wiki_start_pages=["Галоўная_старонка"], + ), + "Bulgarian": Language( + name="Bulgarian", + iso_code="bg", + use_ascii=False, + charsets=["ISO-8859-5", "WINDOWS-1251", "IBM855"], + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя", + wiki_start_pages=["Начална_страница"], + ), + "Czech": Language( + name="Czech", + iso_code="cz", + use_ascii=True, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ", + wiki_start_pages=["Hlavní_strana"], + ), + "Danish": Language( + name="Danish", + iso_code="da", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="æøåÆØÅ", + wiki_start_pages=["Forside"], + ), + "German": Language( + name="German", + iso_code="de", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="äöüßẞÄÖÜ", + wiki_start_pages=["Wikipedia:Hauptseite"], + ), + "Greek": Language( + name="Greek", + iso_code="el", + use_ascii=False, + charsets=["ISO-8859-7", "WINDOWS-1253"], + alphabet="αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ", + wiki_start_pages=["Πύλη:Κύρια"], + ), + "English": Language( + name="English", + iso_code="en", + use_ascii=True, + charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"], + wiki_start_pages=["Main_Page"], + ), + "Esperanto": Language( + name="Esperanto", + iso_code="eo", + # Q, W, X, and Y not used at all + use_ascii=False, + charsets=["ISO-8859-3"], + alphabet="abcĉdefgĝhĥijĵklmnoprsŝtuŭvzABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ", + wiki_start_pages=["Vikipedio:Ĉefpaĝo"], + ), + "Spanish": Language( + name="Spanish", + iso_code="es", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="ñáéíóúüÑÁÉÍÓÚÜ", + wiki_start_pages=["Wikipedia:Portada"], + ), + "Estonian": Language( + name="Estonian", + iso_code="et", + use_ascii=False, + charsets=["ISO-8859-4", "ISO-8859-13", "WINDOWS-1257"], + # C, F, Š, Q, W, X, Y, Z, Ž are only for + # loanwords + alphabet="ABDEGHIJKLMNOPRSTUVÕÄÖÜabdeghijklmnoprstuvõäöü", + wiki_start_pages=["Esileht"], + ), + "Finnish": Language( + name="Finnish", + iso_code="fi", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="ÅÄÖŠŽåäöšž", + wiki_start_pages=["Wikipedia:Etusivu"], + ), + "French": Language( + name="French", + iso_code="fr", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ", + wiki_start_pages=["Wikipédia:Accueil_principal", "Bœuf (animal)"], + ), + "Hebrew": Language( + name="Hebrew", + iso_code="he", + use_ascii=False, + charsets=["ISO-8859-8", "WINDOWS-1255"], + alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ", + wiki_start_pages=["עמוד_ראשי"], + ), + "Croatian": Language( + name="Croatian", + iso_code="hr", + # Q, W, X, Y are only used for foreign words. 
+ use_ascii=False, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="abcčćdđefghijklmnoprsštuvzžABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ", + wiki_start_pages=["Glavna_stranica"], + ), + "Hungarian": Language( + name="Hungarian", + iso_code="hu", + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="abcdefghijklmnoprstuvzáéíóöőúüűABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ", + wiki_start_pages=["Kezdőlap"], + ), + "Italian": Language( + name="Italian", + iso_code="it", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="ÀÈÉÌÒÓÙàèéìòóù", + wiki_start_pages=["Pagina_principale"], + ), + "Lithuanian": Language( + name="Lithuanian", + iso_code="lt", + use_ascii=False, + charsets=["ISO-8859-13", "WINDOWS-1257", "ISO-8859-4"], + # Q, W, and X not used at all + alphabet="AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽaąbcčdeęėfghiįyjklmnoprsštuųūvzž", + wiki_start_pages=["Pagrindinis_puslapis"], + ), + "Latvian": Language( + name="Latvian", + iso_code="lv", + use_ascii=False, + charsets=["ISO-8859-13", "WINDOWS-1257", "ISO-8859-4"], + # Q, W, X, Y are only for loanwords + alphabet="AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽaābcčdeēfgģhiījkķlļmnņoprsštuūvzž", + wiki_start_pages=["Sākumlapa"], + ), + "Macedonian": Language( + name="Macedonian", + iso_code="mk", + use_ascii=False, + charsets=["ISO-8859-5", "WINDOWS-1251", "MacCyrillic", "IBM855"], + alphabet="АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШабвгдѓежзѕијклљмнњопрстќуфхцчџш", + wiki_start_pages=["Главна_страница"], + ), + "Dutch": Language( + name="Dutch", + iso_code="nl", + use_ascii=True, + charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"], + wiki_start_pages=["Hoofdpagina"], + ), + "Polish": Language( + name="Polish", + iso_code="pl", + # Q and X are only used for foreign words. + use_ascii=False, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻaąbcćdeęfghijklłmnńoóprsśtuwyzźż", + wiki_start_pages=["Wikipedia:Strona_główna"], + ), + "Portuguese": Language( + name="Portuguese", + iso_code="pt", + use_ascii=True, + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú", + wiki_start_pages=["Wikipédia:Página_principal"], + ), + "Romanian": Language( + name="Romanian", + iso_code="ro", + use_ascii=True, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="ăâîșțĂÂÎȘȚ", + wiki_start_pages=["Pagina_principală"], + ), + "Russian": Language( + name="Russian", + iso_code="ru", + use_ascii=False, + charsets=[ + "ISO-8859-5", + "WINDOWS-1251", + "KOI8-R", + "MacCyrillic", + "IBM866", + "IBM855", + ], + alphabet="абвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ", + wiki_start_pages=["Заглавная_страница"], + ), + "Slovak": Language( + name="Slovak", + iso_code="sk", + use_ascii=True, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ", + wiki_start_pages=["Hlavná_stránka"], + ), + "Slovene": Language( + name="Slovene", + iso_code="sl", + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=["ISO-8859-2", "WINDOWS-1250"], + alphabet="abcčdefghijklmnoprsštuvzžABCČDEFGHIJKLMNOPRSŠTUVZŽ", + wiki_start_pages=["Glavna_stran"], + ), + # Serbian can be written in both Latin and Cyrillic, but there's no + # simple way to get the Latin alphabet pages from Wikipedia through + # the API, so for now we just support Cyrillic. 
+ "Serbian": Language( + name="Serbian", + iso_code="sr", + alphabet="АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШабвгдђежзијклљмнњопрстћуфхцчџш", + charsets=["ISO-8859-5", "WINDOWS-1251", "MacCyrillic", "IBM855"], + wiki_start_pages=["Главна_страна"], + ), + "Thai": Language( + name="Thai", + iso_code="th", + use_ascii=False, + charsets=["ISO-8859-11", "TIS-620", "CP874"], + alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛", + wiki_start_pages=["หน้าหลัก"], + ), + "Turkish": Language( + name="Turkish", + iso_code="tr", + # Q, W, and X are not used by Turkish + use_ascii=False, + charsets=["ISO-8859-3", "ISO-8859-9", "WINDOWS-1254"], + alphabet="abcçdefgğhıijklmnoöprsştuüvyzâîûABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ", + wiki_start_pages=["Ana_Sayfa"], + ), + "Vietnamese": Language( + name="Vietnamese", + iso_code="vi", + use_ascii=False, + # Windows-1258 is the only common 8-bit + # Vietnamese encoding supported by Python. + # From Wikipedia: + # For systems that lack support for Unicode, + # dozens of 8-bit Vietnamese code pages are + # available.[1] The most common are VISCII + # (TCVN 5712:1993), VPS, and Windows-1258.[3] + # Where ASCII is required, such as when + # ensuring readability in plain text e-mail, + # Vietnamese letters are often encoded + # according to Vietnamese Quoted-Readable + # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4] + # though usage of either variable-width + # scheme has declined dramatically following + # the adoption of Unicode on the World Wide + # Web. + charsets=["WINDOWS-1258"], + alphabet="aăâbcdđeêghiklmnoôơpqrstuưvxyAĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY", + wiki_start_pages=["Chữ_Quốc_ngữ"], + ), +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py new file mode 100644 index 0000000..7d36e64 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/resultdict.py @@ -0,0 +1,16 @@ +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + # TypedDict was introduced in Python 3.8. + # + # TODO: Remove the else block and TYPE_CHECKING check when dropping support + # for Python 3.7. + from typing import TypedDict + + class ResultDict(TypedDict): + encoding: Optional[str] + confidence: float + language: Optional[str] + +else: + ResultDict = dict diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0ffbcdd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcharsetprober.py @@ -0,0 +1,162 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Dict, List, NamedTuple, Optional, Union + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetModel(NamedTuple): + charset_name: str + language: str + char_to_order_map: Dict[int, int] + language_model: Dict[int, Dict[int, int]] + typical_positive_ratio: float + keep_ascii_letters: bool + alphabet: str + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__( + self, + model: SingleByteCharSetModel, + is_reversed: bool = False, + name_prober: Optional[CharSetProber] = None, + ) -> None: + super().__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = is_reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = 255 + self._seq_counters: List[int] = [] + self._total_seqs = 0 + self._total_char = 0 + self._control_char = 0 + self._freq_char = 0 + self.reset() + + def reset(self) -> None: + super().reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + self._control_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self) -> Optional[str]: + if self._name_prober: + return self._name_prober.charset_name + return self._model.charset_name + + @property + def language(self) -> Optional[str]: + if self._name_prober: + return self._name_prober.language + return self._model.language + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + # TODO: Make filter_international_words keep things in self.alphabet + if not self._model.keep_ascii_letters: + byte_str = self.filter_international_words(byte_str) + else: + byte_str = self.remove_xml_tags(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
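+ # In practice, ``order`` is this byte's frequency rank for the model, and a
+ # bigram is scored below only when both this byte and the previous one rank
+ # inside the SAMPLE_SIZE most frequent characters.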
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 + self._last_order = order + + charset_name = self._model.charset_name + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug( + "%s confidence = %s, we have a winner", charset_name, confidence + ) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug( + "%s confidence = %s, below negative shortcut threshold %s", + charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD, + ) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self) -> float: + r = 0.01 + if self._total_seqs > 0: + r = ( + ( + self._seq_counters[SequenceLikelihood.POSITIVE] + + 0.25 * self._seq_counters[SequenceLikelihood.LIKELY] + ) + / self._total_seqs + / self._model.typical_positive_ratio + ) + # The more control characters (proportionnaly to the size + # of the text), the less confident we become in the current + # charset. + r = r * (self._total_char - self._control_char) / self._total_char + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..890ae84 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sbcsgroupprober.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
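To make the damping in SingleByteCharSetProber.get_confidence() above concrete, here is the same arithmetic run on made-up counts (every number below is hypothetical):

positive, likely = 520, 160        # SequenceLikelihood.POSITIVE / .LIKELY hits
total_seqs = 800
typical_positive_ratio = 0.95      # from the SingleByteCharSetModel
total_char, control_char, freq_char = 1000, 10, 640

r = (positive + 0.25 * likely) / total_seqs / typical_positive_ratio
r *= (total_char - control_char) / total_char  # penalize control characters
r *= freq_char / total_char                    # penalize out-of-sample chars
print(round(min(r, 0.99), 3))                  # 0.467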
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .hebrewprober import HebrewProber +from .langbulgarianmodel import ISO_8859_5_BULGARIAN_MODEL, WINDOWS_1251_BULGARIAN_MODEL +from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL +from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL + +# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL, +# WINDOWS_1250_HUNGARIAN_MODEL) +from .langrussianmodel import ( + IBM855_RUSSIAN_MODEL, + IBM866_RUSSIAN_MODEL, + ISO_8859_5_RUSSIAN_MODEL, + KOI8_R_RUSSIAN_MODEL, + MACCYRILLIC_RUSSIAN_MODEL, + WINDOWS_1251_RUSSIAN_MODEL, +) +from .langthaimodel import TIS_620_THAI_MODEL +from .langturkishmodel import ISO_8859_9_TURKISH_MODEL +from .sbcharsetprober import SingleByteCharSetProber + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self) -> None: + super().__init__() + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber( + WINDOWS_1255_HEBREW_MODEL, is_reversed=False, name_prober=hebrew_prober + ) + # TODO: See if using ISO-8859-8 Hebrew model works better here, since + # it's actually the visual one + visual_hebrew_prober = SingleByteCharSetProber( + WINDOWS_1255_HEBREW_MODEL, is_reversed=True, name_prober=hebrew_prober + ) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + # TODO: ORDER MATTERS HERE. I changed the order vs what was in master + # and several tests failed that did not before. Some thought + # should be put into the ordering, and we should consider making + # order not matter here, because that is very counter-intuitive. + self.probers = [ + SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL), + SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL), + SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM866_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM855_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL), + SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL), + SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL), + SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL), + # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL), + SingleByteCharSetProber(TIS_620_THAI_MODEL), + SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL), + hebrew_prober, + logical_hebrew_prober, + visual_hebrew_prober, + ] + self.reset() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py new file mode 100644 index 0000000..91df077 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/sjisprober.py @@ -0,0 +1,105 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
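SBCSGroupProber above relies on CharSetGroupProber for aggregation; the essential pattern is simply to feed every child the same bytes and report the most confident one. A simplified sketch of that pattern (not the vendored implementation):

def best_guess(probers, data):
    # Feed the same bytes to every prober and keep the strongest answer.
    for prober in probers:
        prober.feed(data)
    return max(
        ((p.get_confidence(), p.charset_name) for p in probers),
        key=lambda pair: pair[0],
    )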
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Union + +from .chardistribution import SJISDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .enums import MachineState, ProbingState +from .jpcntx import SJISContextAnalysis +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import SJIS_SM_MODEL + + +class SJISProber(MultiByteCharSetProber): + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self) -> None: + super().reset() + self.context_analyzer.reset() + + @property + def charset_name(self) -> str: + return self.context_analyzer.charset_name + + @property + def language(self) -> str: + return "Japanese" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None + + for i, byte in enumerate(byte_str): + coding_state = self.coding_sm.next_state(byte) + if coding_state == MachineState.ERROR: + self.logger.debug( + "%s %s prober hit error at byte %s", + self.charset_name, + self.language, + i, + ) + self._state = ProbingState.NOT_ME + break + if coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + if coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte + self.context_analyzer.feed( + self._last_char[2 - char_len :], char_len + ) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed( + byte_str[i + 1 - char_len : i + 3 - char_len], char_len + ) + self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if self.context_analyzer.got_enough_data() and ( + self.get_confidence() > self.SHORTCUT_THRESHOLD + ): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self) -> float: + assert self.distribution_analyzer is not None + + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py new file mode 100644 index 0000000..30c441d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/universaldetector.py @@ -0,0 +1,362 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The 
Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re +from typing import List, Optional, Union + +from .charsetgroupprober import CharSetGroupProber +from .charsetprober import CharSetProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .macromanprober import MacRomanProber +from .mbcsgroupprober import MBCSGroupProber +from .resultdict import ResultDict +from .sbcsgroupprober import SBCSGroupProber +from .utf1632prober import UTF1632Prober + + +class UniversalDetector: + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b"[\x80-\xFF]") + ESC_DETECTOR = re.compile(b"(\033|~{)") + WIN_BYTE_DETECTOR = re.compile(b"[\x80-\x9F]") + ISO_WIN_MAP = { + "iso-8859-1": "Windows-1252", + "iso-8859-2": "Windows-1250", + "iso-8859-5": "Windows-1251", + "iso-8859-6": "Windows-1256", + "iso-8859-7": "Windows-1253", + "iso-8859-8": "Windows-1255", + "iso-8859-9": "Windows-1254", + "iso-8859-13": "Windows-1257", + } + # Based on https://encoding.spec.whatwg.org/#names-and-labels + # but altered to match Python names for encodings and remove mappings + # that break tests. 
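+ # For example, with should_rename_legacy=True a final "ascii" or
+ # "iso-8859-1" guess is reported by close() as "Windows-1252".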
+ LEGACY_MAP = { + "ascii": "Windows-1252", + "iso-8859-1": "Windows-1252", + "tis-620": "ISO-8859-11", + "iso-8859-9": "Windows-1254", + "gb2312": "GB18030", + "euc-kr": "CP949", + "utf-16le": "UTF-16", + } + + def __init__( + self, + lang_filter: LanguageFilter = LanguageFilter.ALL, + should_rename_legacy: bool = False, + ) -> None: + self._esc_charset_prober: Optional[EscCharSetProber] = None + self._utf1632_prober: Optional[UTF1632Prober] = None + self._charset_probers: List[CharSetProber] = [] + self.result: ResultDict = { + "encoding": None, + "confidence": 0.0, + "language": None, + } + self.done = False + self._got_data = False + self._input_state = InputState.PURE_ASCII + self._last_char = b"" + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = False + self.should_rename_legacy = should_rename_legacy + self.reset() + + @property + def input_state(self) -> int: + return self._input_state + + @property + def has_win_bytes(self) -> bool: + return self._has_win_bytes + + @property + def charset_probers(self) -> List[CharSetProber]: + return self._charset_probers + + def reset(self) -> None: + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {"encoding": None, "confidence": 0.0, "language": None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b"" + if self._esc_charset_prober: + self._esc_charset_prober.reset() + if self._utf1632_prober: + self._utf1632_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str: Union[bytes, bytearray]) -> None: + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. + """ + if self.done: + return + + if not byte_str: + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = { + "encoding": "UTF-8-SIG", + "confidence": 1.0, + "language": "", + } + elif byte_str.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {"encoding": "UTF-32", "confidence": 1.0, "language": ""} + elif byte_str.startswith(b"\xFE\xFF\x00\x00"): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = { + # TODO: This encoding is not supported by Python. Should remove? + "encoding": "X-ISO-10646-UCS-4-3412", + "confidence": 1.0, + "language": "", + } + elif byte_str.startswith(b"\x00\x00\xFF\xFE"): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = { + # TODO: This encoding is not supported by Python. Should remove? 
+ "encoding": "X-ISO-10646-UCS-4-2143", + "confidence": 1.0, + "language": "", + } + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {"encoding": "UTF-16", "confidence": 1.0, "language": ""} + + self._got_data = True + if self.result["encoding"] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif ( + self._input_state == InputState.PURE_ASCII + and self.ESC_DETECTOR.search(self._last_char + byte_str) + ): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # next we will look to see if it is appears to be either a UTF-16 or + # UTF-32 encoding + if not self._utf1632_prober: + self._utf1632_prober = UTF1632Prober() + + if self._utf1632_prober.state == ProbingState.DETECTING: + if self._utf1632_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = { + "encoding": self._utf1632_prober.charset_name, + "confidence": self._utf1632_prober.get_confidence(), + "language": "", + } + self.done = True + return + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = { + "encoding": self._esc_charset_prober.charset_name, + "confidence": self._esc_charset_prober.get_confidence(), + "language": self._esc_charset_prober.language, + } + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. + elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + self._charset_probers.append(MacRomanProber()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = { + "encoding": prober.charset_name, + "confidence": prober.get_confidence(), + "language": prober.language, + } + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self) -> ResultDict: + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. 
+ """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug("no data received!") + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {"encoding": "ascii", "confidence": 1.0, "language": ""} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + assert charset_name is not None + lower_charset_name = charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith("iso-8859"): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get( + lower_charset_name, charset_name + ) + # Rename legacy encodings with superset encodings if asked + if self.should_rename_legacy: + charset_name = self.LEGACY_MAP.get( + (charset_name or "").lower(), charset_name + ) + self.result = { + "encoding": charset_name, + "confidence": confidence, + "language": max_prober.language, + } + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() <= logging.DEBUG: + if self.result["encoding"] is None: + self.logger.debug("no probers hit minimum threshold") + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug( + "%s %s confidence = %s", + prober.charset_name, + prober.language, + prober.get_confidence(), + ) + else: + self.logger.debug( + "%s %s confidence = %s", + group_prober.charset_name, + group_prober.language, + group_prober.get_confidence(), + ) + return self.result diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py new file mode 100644 index 0000000..6bdec63 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf1632prober.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# +# Contributor(s): +# Jason Zavaglia +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +from typing import List, Union + +from .charsetprober import CharSetProber +from .enums import ProbingState + + +class UTF1632Prober(CharSetProber): + """ + This class simply looks for occurrences of zero bytes, and infers + whether the file is UTF16 or UTF32 (low-endian or big-endian) + For instance, files looking like ( \0 \0 \0 [nonzero] )+ + have a good probability to be UTF32BE. Files looking like ( \0 [nonzero] )+ + may be guessed to be UTF16BE, and inversely for little-endian varieties. + """ + + # how many logical characters to scan before feeling confident of prediction + MIN_CHARS_FOR_DETECTION = 20 + # a fixed constant ratio of expected zeros or non-zeros in modulo-position. + EXPECTED_RATIO = 0.94 + + def __init__(self) -> None: + super().__init__() + self.position = 0 + self.zeros_at_mod = [0] * 4 + self.nonzeros_at_mod = [0] * 4 + self._state = ProbingState.DETECTING + self.quad = [0, 0, 0, 0] + self.invalid_utf16be = False + self.invalid_utf16le = False + self.invalid_utf32be = False + self.invalid_utf32le = False + self.first_half_surrogate_pair_detected_16be = False + self.first_half_surrogate_pair_detected_16le = False + self.reset() + + def reset(self) -> None: + super().reset() + self.position = 0 + self.zeros_at_mod = [0] * 4 + self.nonzeros_at_mod = [0] * 4 + self._state = ProbingState.DETECTING + self.invalid_utf16be = False + self.invalid_utf16le = False + self.invalid_utf32be = False + self.invalid_utf32le = False + self.first_half_surrogate_pair_detected_16be = False + self.first_half_surrogate_pair_detected_16le = False + self.quad = [0, 0, 0, 0] + + @property + def charset_name(self) -> str: + if self.is_likely_utf32be(): + return "utf-32be" + if self.is_likely_utf32le(): + return "utf-32le" + if self.is_likely_utf16be(): + return "utf-16be" + if self.is_likely_utf16le(): + return "utf-16le" + # default to something valid + return "utf-16" + + @property + def language(self) -> str: + return "" + + def approx_32bit_chars(self) -> float: + return max(1.0, self.position / 4.0) + + def approx_16bit_chars(self) -> float: + return max(1.0, self.position / 2.0) + + def is_likely_utf32be(self) -> bool: + approx_chars = self.approx_32bit_chars() + return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( + self.zeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO + and self.zeros_at_mod[1] / approx_chars > self.EXPECTED_RATIO + and self.zeros_at_mod[2] / approx_chars > self.EXPECTED_RATIO + and self.nonzeros_at_mod[3] / approx_chars > self.EXPECTED_RATIO + and not self.invalid_utf32be + ) + + def is_likely_utf32le(self) -> bool: + approx_chars = self.approx_32bit_chars() + return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( + self.nonzeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO + and self.zeros_at_mod[1] / approx_chars > self.EXPECTED_RATIO + and self.zeros_at_mod[2] / approx_chars > self.EXPECTED_RATIO + and self.zeros_at_mod[3] / approx_chars > self.EXPECTED_RATIO + and not self.invalid_utf32le + ) + + def is_likely_utf16be(self) -> bool: + approx_chars = self.approx_16bit_chars() + return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( + (self.nonzeros_at_mod[1] + self.nonzeros_at_mod[3]) / approx_chars + > self.EXPECTED_RATIO + and (self.zeros_at_mod[0] + 
self.zeros_at_mod[2]) / approx_chars + > self.EXPECTED_RATIO + and not self.invalid_utf16be + ) + + def is_likely_utf16le(self) -> bool: + approx_chars = self.approx_16bit_chars() + return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( + (self.nonzeros_at_mod[0] + self.nonzeros_at_mod[2]) / approx_chars + > self.EXPECTED_RATIO + and (self.zeros_at_mod[1] + self.zeros_at_mod[3]) / approx_chars + > self.EXPECTED_RATIO + and not self.invalid_utf16le + ) + + def validate_utf32_characters(self, quad: List[int]) -> None: + """ + Validate if the quad of bytes is valid UTF-32. + + UTF-32 is valid in the range 0x00000000 - 0x0010FFFF + excluding 0x0000D800 - 0x0000DFFF + + https://en.wikipedia.org/wiki/UTF-32 + """ + if ( + quad[0] != 0 + or quad[1] > 0x10 + or (quad[0] == 0 and quad[1] == 0 and 0xD8 <= quad[2] <= 0xDF) + ): + self.invalid_utf32be = True + if ( + quad[3] != 0 + or quad[2] > 0x10 + or (quad[3] == 0 and quad[2] == 0 and 0xD8 <= quad[1] <= 0xDF) + ): + self.invalid_utf32le = True + + def validate_utf16_characters(self, pair: List[int]) -> None: + """ + Validate if the pair of bytes is valid UTF-16. + + UTF-16 is valid in the range 0x0000 - 0xFFFF excluding 0xD800 - 0xFFFF + with an exception for surrogate pairs, which must be in the range + 0xD800-0xDBFF followed by 0xDC00-0xDFFF + + https://en.wikipedia.org/wiki/UTF-16 + """ + if not self.first_half_surrogate_pair_detected_16be: + if 0xD8 <= pair[0] <= 0xDB: + self.first_half_surrogate_pair_detected_16be = True + elif 0xDC <= pair[0] <= 0xDF: + self.invalid_utf16be = True + else: + if 0xDC <= pair[0] <= 0xDF: + self.first_half_surrogate_pair_detected_16be = False + else: + self.invalid_utf16be = True + + if not self.first_half_surrogate_pair_detected_16le: + if 0xD8 <= pair[1] <= 0xDB: + self.first_half_surrogate_pair_detected_16le = True + elif 0xDC <= pair[1] <= 0xDF: + self.invalid_utf16le = True + else: + if 0xDC <= pair[1] <= 0xDF: + self.first_half_surrogate_pair_detected_16le = False + else: + self.invalid_utf16le = True + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + for c in byte_str: + mod4 = self.position % 4 + self.quad[mod4] = c + if mod4 == 3: + self.validate_utf32_characters(self.quad) + self.validate_utf16_characters(self.quad[0:2]) + self.validate_utf16_characters(self.quad[2:4]) + if c == 0: + self.zeros_at_mod[mod4] += 1 + else: + self.nonzeros_at_mod[mod4] += 1 + self.position += 1 + return self.state + + @property + def state(self) -> ProbingState: + if self._state in {ProbingState.NOT_ME, ProbingState.FOUND_IT}: + # terminal, decided states + return self._state + if self.get_confidence() > 0.80: + self._state = ProbingState.FOUND_IT + elif self.position > 4 * 1024: + # if we get to 4kb into the file, and we can't conclude it's UTF, + # let's give up + self._state = ProbingState.NOT_ME + return self._state + + def get_confidence(self) -> float: + return ( + 0.85 + if ( + self.is_likely_utf16le() + or self.is_likely_utf16be() + or self.is_likely_utf32le() + or self.is_likely_utf32be() + ) + else 0.00 + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py new file mode 100644 index 0000000..d96354d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
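The modulo bookkeeping in UTF1632Prober above is easy to verify on real data: ASCII text encoded as UTF-32BE puts its zero bytes at offsets 0-2 of every 4-byte quad. A standalone demonstration, independent of the prober itself:

data = "chardet".encode("utf-32-be")   # 7 characters -> 28 bytes, no BOM
zeros_at_mod = [0, 0, 0, 0]
for position, byte in enumerate(data):
    if byte == 0:
        zeros_at_mod[position % 4] += 1
print(zeros_at_mod)                    # [7, 7, 7, 0]: payload only at mod 3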
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import Union + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import MachineState, ProbingState +from .mbcssm import UTF8_SM_MODEL + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self) -> None: + super().__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = 0 + self.reset() + + def reset(self) -> None: + super().reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self) -> str: + return "utf-8" + + @property + def language(self) -> str: + return "" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + if coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + if coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self) -> float: + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB**self._num_mb_chars + return 1.0 - unlike + return unlike diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py new file mode 100644 index 0000000..c5e9d85 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setuptools and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "5.1.0" +VERSION = __version__.split(".") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py new file mode 100644 index 0000000..383101c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__init__.py @@ -0,0 +1,7 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
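A note on how the UTF1632Prober above behaves in practice: plain ASCII text encoded as UTF-32-BE produces quads of the form `00 00 00 xx`, so the zero counters at positions 0-2 (mod 4) and the non-zero counter at position 3 quickly exceed EXPECTED_RATIO. A minimal sketch, assuming the vendored module path from this diff is importable (importing from `pip._vendor` is for illustration only; a stand-alone `pip install chardet` would use `from chardet.utf1632prober import UTF1632Prober`):

```python
# Sketch: exercising the zero-byte heuristic directly (vendored path assumed).
from pip._vendor.chardet.utf1632prober import UTF1632Prober

prober = UTF1632Prober()
# ASCII in UTF-32-BE: every character becomes 00 00 00 xx, and we feed well
# over MIN_CHARS_FOR_DETECTION (20) logical characters.
prober.feed("hello, utf detection!".encode("utf-32-be") * 8)
print(prober.charset_name)      # utf-32be
print(prober.get_confidence())  # 0.85
```

The 0.85 comes straight from get_confidence(), and since it exceeds the 0.80 threshold in the state property, the prober's state flips to FOUND_IT.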
+from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.4.6' + diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c595717 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc new file mode 100644 index 0000000..46b5f96 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc new file mode 100644 index 0000000..dcdfa48 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc new file mode 100644 index 0000000..f93f4ae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc new file mode 100644 index 0000000..394f590 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc new file mode 100644 index 0000000..3ec2e46 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..11ec695 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates the ANSI character codes for printing colors to terminals. +See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\a' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. 
+ # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..abf209e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,277 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL +from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def __enter__(self, *args, **kwargs): + # special method lookup bypasses __getattr__/__getattribute__, see + # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit + # thus, contextlib magic methods are not proxied via __getattr__ + return self.__wrapped.__enter__(*args, **kwargs) + + def __exit__(self, *args, **kwargs): + return self.__wrapped.__exit__(*args, **kwargs) + + def __setstate__(self, state): + self.__dict__ = state + + def __getstate__(self): + return self.__dict__ + + def write(self, text): + self.__convertor.write(text) + + def isatty(self): + stream = self.__wrapped + if 'PYCHARM_HOSTED' in os.environ: + if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): + return True + try: + stream_isatty = stream.isatty + except AttributeError: + return False + else: + return stream_isatty() + + @property + def closed(self): + stream = self.__wrapped + try: + return stream.closed + # AttributeError in the case that the stream doesn't support being closed + # ValueError for the case that the stream has already been detached when atexit runs + except (AttributeError, ValueError): + return True + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. + ''' + ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + try: + fd = wrapped.fileno() + except Exception: + fd = -1 + system_has_native_ansi = not on_windows or enable_vt_processing(fd) + have_tty = not self.stream.closed and self.stream.isatty() + need_conversion = conversion_supported and not system_has_native_ansi + + # should we strip ANSI sequences from our output? + if strip is None: + strip = need_conversion or not have_tty + self.strip = strip + + # should we convert ANSI sequences into win32 calls? + if convert is None: + convert = need_conversion and have_tty + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. 
This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not self.stream.closed: + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. 
+ ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text + + + def flush(self): + self.wrapped.flush() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..d5fd4b7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/initialise.py @@ -0,0 +1,121 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +def _wipe_internal_state_for_tests(): + global orig_stdout, orig_stderr + orig_stdout = None + orig_stderr = None + + global wrapped_stdout, wrapped_stderr + wrapped_stdout = None + wrapped_stderr = None + + global atexit_done + atexit_done = False + + global fixed_windows_console + fixed_windows_console = False + + try: + # no-op if it wasn't registered + atexit.unregister(reset_all) + except AttributeError: + # python 2: no atexit.unregister. Oh well, we did our best. 
+ pass + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +def just_fix_windows_console(): + global fixed_windows_console + + if sys.platform != "win32": + return + if fixed_windows_console: + return + if wrapped_stdout is not None or wrapped_stderr is not None: + # Someone already ran init() and it did stuff, so we won't second-guess them + return + + # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the + # native ANSI support in the console as a side-effect. We only need to actually + # replace sys.stdout/stderr if we're in the old-style conversion mode. + new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) + if new_stdout.convert: + sys.stdout = new_stdout + new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) + if new_stderr.convert: + sys.stderr = new_stderr + + fixed_windows_console = True + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream + + +# Use this for initial setup as well, to reduce code duplication +_wipe_internal_state_for_tests() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py new file mode 100644 index 0000000..8c5661e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__init__.py @@ -0,0 +1 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
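Taken together, the initialise.py API above is used like this (a minimal sketch; on a non-Windows terminal init() leaves the streams effectively untouched unless autoreset or strip forces a wrapper):

```python
from colorama import Fore, init, deinit, just_fix_windows_console

# Modern one-shot entry point (colorama 0.4.6): enables ANSI handling on old
# Windows consoles and is a no-op everywhere else, including after init().
just_fix_windows_console()

init(autoreset=True)        # wrap sys.stdout/stderr; reset style after each write
print(Fore.GREEN + "ok")    # no explicit Style.RESET_ALL needed with autoreset
print(Fore.RED + "failed")
deinit()                    # restore the original sys.stdout / sys.stderr
```

deinit() and reinit() simply swap the saved original and wrapped streams back and forth, which is why the module keeps `orig_stdout`/`wrapped_stdout` and friends in module-level globals.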
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b8ddab2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc new file mode 100644 index 0000000..3748ac7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc new file mode 100644 index 0000000..4ba47d8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc new file mode 100644 index 0000000..6149d20 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc new file mode 100644 index 0000000..b6b06ec Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..f592429 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc new file mode 100644 index 0000000..e327248 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py new file mode 100644 index 0000000..0a20c80 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansi_test.py @@ -0,0 +1,76 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import sys +from unittest import TestCase, main + +from ..ansi import Back, Fore, Style +from ..ansitowin32 import AnsiToWin32 + +stdout_orig = sys.stdout +stderr_orig = sys.stderr + + +class AnsiTest(TestCase): + + def setUp(self): + # sanity check: stdout should be a file or StringIO object. 
+ # It will only be AnsiToWin32 if init() has previously wrapped it + self.assertNotEqual(type(sys.stdout), AnsiToWin32) + self.assertNotEqual(type(sys.stderr), AnsiToWin32) + + def tearDown(self): + sys.stdout = stdout_orig + sys.stderr = stderr_orig + + + def testForeAttributes(self): + self.assertEqual(Fore.BLACK, '\033[30m') + self.assertEqual(Fore.RED, '\033[31m') + self.assertEqual(Fore.GREEN, '\033[32m') + self.assertEqual(Fore.YELLOW, '\033[33m') + self.assertEqual(Fore.BLUE, '\033[34m') + self.assertEqual(Fore.MAGENTA, '\033[35m') + self.assertEqual(Fore.CYAN, '\033[36m') + self.assertEqual(Fore.WHITE, '\033[37m') + self.assertEqual(Fore.RESET, '\033[39m') + + # Check the light, extended versions. + self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m') + self.assertEqual(Fore.LIGHTRED_EX, '\033[91m') + self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m') + self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m') + self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m') + self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m') + self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m') + self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m') + + + def testBackAttributes(self): + self.assertEqual(Back.BLACK, '\033[40m') + self.assertEqual(Back.RED, '\033[41m') + self.assertEqual(Back.GREEN, '\033[42m') + self.assertEqual(Back.YELLOW, '\033[43m') + self.assertEqual(Back.BLUE, '\033[44m') + self.assertEqual(Back.MAGENTA, '\033[45m') + self.assertEqual(Back.CYAN, '\033[46m') + self.assertEqual(Back.WHITE, '\033[47m') + self.assertEqual(Back.RESET, '\033[49m') + + # Check the light, extended versions. + self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m') + self.assertEqual(Back.LIGHTRED_EX, '\033[101m') + self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m') + self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m') + self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m') + self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m') + self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m') + self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m') + + + def testStyleAttributes(self): + self.assertEqual(Style.DIM, '\033[2m') + self.assertEqual(Style.NORMAL, '\033[22m') + self.assertEqual(Style.BRIGHT, '\033[1m') + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py new file mode 100644 index 0000000..91ca551 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/ansitowin32_test.py @@ -0,0 +1,294 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
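The expected strings asserted in ansi_test.py above follow mechanically from code_to_chars(): CSI (`'\033['`) plus the numeric code plus `'m'`. A small sketch (using the stand-alone `colorama` package for the imports; the copy in this diff lives under `pip._vendor.colorama`):

```python
from colorama.ansi import Back, Cursor, Fore, Style, code_to_chars

# Each attribute is just the numeric SGR code wrapped at instantiation time.
assert Fore.RED == code_to_chars(31) == '\033[31m'

msg = Fore.YELLOW + Back.BLUE + 'warning' + Style.RESET_ALL
print(repr(msg))               # '\x1b[33m\x1b[44mwarning\x1b[0m'
print(repr(Cursor.POS(3, 5)))  # '\x1b[5;3H' -- ANSI wants row (y) before column (x)
```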
+from io import StringIO, TextIOWrapper +from unittest import TestCase, main +try: + from contextlib import ExitStack +except ImportError: + # python 2 + from contextlib2 import ExitStack + +try: + from unittest.mock import MagicMock, Mock, patch +except ImportError: + from mock import MagicMock, Mock, patch + +from ..ansitowin32 import AnsiToWin32, StreamWrapper +from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING +from .utils import osname + + +class StreamWrapperTest(TestCase): + + def testIsAProxy(self): + mockStream = Mock() + wrapper = StreamWrapper(mockStream, None) + self.assertTrue( wrapper.random_attr is mockStream.random_attr ) + + def testDelegatesWrite(self): + mockStream = Mock() + mockConverter = Mock() + wrapper = StreamWrapper(mockStream, mockConverter) + wrapper.write('hello') + self.assertTrue(mockConverter.write.call_args, (('hello',), {})) + + def testDelegatesContext(self): + mockConverter = Mock() + s = StringIO() + with StreamWrapper(s, mockConverter) as fp: + fp.write(u'hello') + self.assertTrue(s.closed) + + def testProxyNoContextManager(self): + mockStream = MagicMock() + mockStream.__enter__.side_effect = AttributeError() + mockConverter = Mock() + with self.assertRaises(AttributeError) as excinfo: + with StreamWrapper(mockStream, mockConverter) as wrapper: + wrapper.write('hello') + + def test_closed_shouldnt_raise_on_closed_stream(self): + stream = StringIO() + stream.close() + wrapper = StreamWrapper(stream, None) + self.assertEqual(wrapper.closed, True) + + def test_closed_shouldnt_raise_on_detached_stream(self): + stream = TextIOWrapper(StringIO()) + stream.detach() + wrapper = StreamWrapper(stream, None) + self.assertEqual(wrapper.closed, True) + +class AnsiToWin32Test(TestCase): + + def testInit(self): + mockStdout = Mock() + auto = Mock() + stream = AnsiToWin32(mockStdout, autoreset=auto) + self.assertEqual(stream.wrapped, mockStdout) + self.assertEqual(stream.autoreset, auto) + + @patch('colorama.ansitowin32.winterm', None) + @patch('colorama.ansitowin32.winapi_test', lambda *_: True) + def testStripIsTrueOnWindows(self): + with osname('nt'): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + self.assertTrue(stream.strip) + + def testStripIsFalseOffWindows(self): + with osname('posix'): + mockStdout = Mock(closed=False) + stream = AnsiToWin32(mockStdout) + self.assertFalse(stream.strip) + + def testWriteStripsAnsi(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + stream.wrapped = Mock() + stream.write_and_convert = Mock() + stream.strip = True + + stream.write('abc') + + self.assertFalse(stream.wrapped.write.called) + self.assertEqual(stream.write_and_convert.call_args, (('abc',), {})) + + def testWriteDoesNotStripAnsi(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + stream.wrapped = Mock() + stream.write_and_convert = Mock() + stream.strip = False + stream.convert = False + + stream.write('abc') + + self.assertFalse(stream.write_and_convert.called) + self.assertEqual(stream.wrapped.write.call_args, (('abc',), {})) + + def assert_autoresets(self, convert, autoreset=True): + stream = AnsiToWin32(Mock()) + stream.convert = convert + stream.reset_all = Mock() + stream.autoreset = autoreset + stream.winterm = Mock() + + stream.write('abc') + + self.assertEqual(stream.reset_all.called, autoreset) + + def testWriteAutoresets(self): + self.assert_autoresets(convert=True) + self.assert_autoresets(convert=False) + self.assert_autoresets(convert=True, autoreset=False) + self.assert_autoresets(convert=False, 
autoreset=False) + + def testWriteAndConvertWritesPlainText(self): + stream = AnsiToWin32(Mock()) + stream.write_and_convert( 'abc' ) + self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) ) + + def testWriteAndConvertStripsAllValidAnsi(self): + stream = AnsiToWin32(Mock()) + stream.call_win32 = Mock() + data = [ + 'abc\033[mdef', + 'abc\033[0mdef', + 'abc\033[2mdef', + 'abc\033[02mdef', + 'abc\033[002mdef', + 'abc\033[40mdef', + 'abc\033[040mdef', + 'abc\033[0;1mdef', + 'abc\033[40;50mdef', + 'abc\033[50;30;40mdef', + 'abc\033[Adef', + 'abc\033[0Gdef', + 'abc\033[1;20;128Hdef', + ] + for datum in data: + stream.wrapped.write.reset_mock() + stream.write_and_convert( datum ) + self.assertEqual( + [args[0] for args in stream.wrapped.write.call_args_list], + [ ('abc',), ('def',) ] + ) + + def testWriteAndConvertSkipsEmptySnippets(self): + stream = AnsiToWin32(Mock()) + stream.call_win32 = Mock() + stream.write_and_convert( '\033[40m\033[41m' ) + self.assertFalse( stream.wrapped.write.called ) + + def testWriteAndConvertCallsWin32WithParamsAndCommand(self): + stream = AnsiToWin32(Mock()) + stream.convert = True + stream.call_win32 = Mock() + stream.extract_params = Mock(return_value='params') + data = { + 'abc\033[adef': ('a', 'params'), + 'abc\033[;;bdef': ('b', 'params'), + 'abc\033[0cdef': ('c', 'params'), + 'abc\033[;;0;;Gdef': ('G', 'params'), + 'abc\033[1;20;128Hdef': ('H', 'params'), + } + for datum, expected in data.items(): + stream.call_win32.reset_mock() + stream.write_and_convert( datum ) + self.assertEqual( stream.call_win32.call_args[0], expected ) + + def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self): + stream = StringIO() + converter = AnsiToWin32(stream) + stream.close() + + converter.reset_all() + + def test_wrap_shouldnt_raise_on_closed_orig_stdout(self): + stream = StringIO() + stream.close() + with \ + patch("colorama.ansitowin32.os.name", "nt"), \ + patch("colorama.ansitowin32.winapi_test", lambda: True): + converter = AnsiToWin32(stream) + self.assertTrue(converter.strip) + self.assertFalse(converter.convert) + + def test_wrap_shouldnt_raise_on_missing_closed_attr(self): + with \ + patch("colorama.ansitowin32.os.name", "nt"), \ + patch("colorama.ansitowin32.winapi_test", lambda: True): + converter = AnsiToWin32(object()) + self.assertTrue(converter.strip) + self.assertFalse(converter.convert) + + def testExtractParams(self): + stream = AnsiToWin32(Mock()) + data = { + '': (0,), + ';;': (0,), + '2': (2,), + ';;002;;': (2,), + '0;1': (0, 1), + ';;003;;456;;': (3, 456), + '11;22;33;44;55': (11, 22, 33, 44, 55), + } + for datum, expected in data.items(): + self.assertEqual(stream.extract_params('m', datum), expected) + + def testCallWin32UsesLookup(self): + listener = Mock() + stream = AnsiToWin32(listener) + stream.win32_calls = { + 1: (lambda *_, **__: listener(11),), + 2: (lambda *_, **__: listener(22),), + 3: (lambda *_, **__: listener(33),), + } + stream.call_win32('m', (3, 1, 99, 2)) + self.assertEqual( + [a[0][0] for a in listener.call_args_list], + [33, 11, 22] ) + + def test_osc_codes(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout, convert=True) + with patch('colorama.ansitowin32.winterm') as winterm: + data = [ + '\033]0\x07', # missing arguments + '\033]0;foo\x08', # wrong OSC command + '\033]0;colorama_test_title\x07', # should work + '\033]1;colorama_test_title\x07', # wrong set command + '\033]2;colorama_test_title\x07', # should work + '\033]' + ';' * 64 + '\x08', # see issue #247 + ] + for code in data: + 
stream.write(code) + self.assertEqual(winterm.set_title.call_count, 2) + + def test_native_windows_ansi(self): + with ExitStack() as stack: + def p(a, b): + stack.enter_context(patch(a, b, create=True)) + # Pretend to be on Windows + p("colorama.ansitowin32.os.name", "nt") + p("colorama.ansitowin32.winapi_test", lambda: True) + p("colorama.win32.winapi_test", lambda: True) + p("colorama.winterm.win32.windll", "non-None") + p("colorama.winterm.get_osfhandle", lambda _: 1234) + + # Pretend that our mock stream has native ANSI support + p( + "colorama.winterm.win32.GetConsoleMode", + lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + SetConsoleMode = Mock() + p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) + + stdout = Mock() + stdout.closed = False + stdout.isatty.return_value = True + stdout.fileno.return_value = 1 + + # Our fake console says it has native vt support, so AnsiToWin32 should + # enable that support and do nothing else. + stream = AnsiToWin32(stdout) + SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) + self.assertFalse(stream.strip) + self.assertFalse(stream.convert) + self.assertFalse(stream.should_wrap()) + + # Now let's pretend we're on an old Windows console, that doesn't have + # native ANSI support. + p("colorama.winterm.win32.GetConsoleMode", lambda _: 0) + SetConsoleMode = Mock() + p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) + + stream = AnsiToWin32(stdout) + SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) + self.assertTrue(stream.strip) + self.assertTrue(stream.convert) + self.assertTrue(stream.should_wrap()) + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py new file mode 100644 index 0000000..89f9b07 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/initialise_test.py @@ -0,0 +1,189 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
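For a concrete picture of what write_and_convert() and extract_params() are being tested against, here is the same ANSI_CSI_RE pattern from ansitowin32.py run standalone (no colorama import needed):

```python
import re

# Same pattern as AnsiToWin32.ANSI_CSI_RE above.
ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?')

for match in ANSI_CSI_RE.finditer('abc\033[31mdef\033[1;20;128Hghi'):
    print(match.groups())
# ('31', 'm')       -> one SGR parameter
# ('1;20;128', 'H') -> cursor position; extract_params() turns it into (1, 20, 128)
```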
+import sys +from unittest import TestCase, main, skipUnless + +try: + from unittest.mock import patch, Mock +except ImportError: + from mock import patch, Mock + +from ..ansitowin32 import StreamWrapper +from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests +from .utils import osname, replace_by + +orig_stdout = sys.stdout +orig_stderr = sys.stderr + + +class InitTest(TestCase): + + @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty") + def setUp(self): + # sanity check + self.assertNotWrapped() + + def tearDown(self): + _wipe_internal_state_for_tests() + sys.stdout = orig_stdout + sys.stderr = orig_stderr + + def assertWrapped(self): + self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped') + self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped') + self.assertTrue(isinstance(sys.stdout, StreamWrapper), + 'bad stdout wrapper') + self.assertTrue(isinstance(sys.stderr, StreamWrapper), + 'bad stderr wrapper') + + def assertNotWrapped(self): + self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped') + self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped') + + @patch('colorama.initialise.reset_all') + @patch('colorama.ansitowin32.winapi_test', lambda *_: True) + @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False) + def testInitWrapsOnWindows(self, _): + with osname("nt"): + init() + self.assertWrapped() + + @patch('colorama.initialise.reset_all') + @patch('colorama.ansitowin32.winapi_test', lambda *_: False) + def testInitDoesntWrapOnEmulatedWindows(self, _): + with osname("nt"): + init() + self.assertNotWrapped() + + def testInitDoesntWrapOnNonWindows(self): + with osname("posix"): + init() + self.assertNotWrapped() + + def testInitDoesntWrapIfNone(self): + with replace_by(None): + init() + # We can't use assertNotWrapped here because replace_by(None) + # changes stdout/stderr already. 
+ self.assertIsNone(sys.stdout) + self.assertIsNone(sys.stderr) + + def testInitAutoresetOnWrapsOnAllPlatforms(self): + with osname("posix"): + init(autoreset=True) + self.assertWrapped() + + def testInitWrapOffDoesntWrapOnWindows(self): + with osname("nt"): + init(wrap=False) + self.assertNotWrapped() + + def testInitWrapOffIncompatibleWithAutoresetOn(self): + self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False)) + + @patch('colorama.win32.SetConsoleTextAttribute') + @patch('colorama.initialise.AnsiToWin32') + def testAutoResetPassedOn(self, mockATW32, _): + with osname("nt"): + init(autoreset=True) + self.assertEqual(len(mockATW32.call_args_list), 2) + self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True) + self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True) + + @patch('colorama.initialise.AnsiToWin32') + def testAutoResetChangeable(self, mockATW32): + with osname("nt"): + init() + + init(autoreset=True) + self.assertEqual(len(mockATW32.call_args_list), 4) + self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True) + self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True) + + init() + self.assertEqual(len(mockATW32.call_args_list), 6) + self.assertEqual( + mockATW32.call_args_list[4][1]['autoreset'], False) + self.assertEqual( + mockATW32.call_args_list[5][1]['autoreset'], False) + + + @patch('colorama.initialise.atexit.register') + def testAtexitRegisteredOnlyOnce(self, mockRegister): + init() + self.assertTrue(mockRegister.called) + mockRegister.reset_mock() + init() + self.assertFalse(mockRegister.called) + + +class JustFixWindowsConsoleTest(TestCase): + def _reset(self): + _wipe_internal_state_for_tests() + sys.stdout = orig_stdout + sys.stderr = orig_stderr + + def tearDown(self): + self._reset() + + @patch("colorama.ansitowin32.winapi_test", lambda: True) + def testJustFixWindowsConsole(self): + if sys.platform != "win32": + # just_fix_windows_console should be a no-op + just_fix_windows_console() + self.assertIs(sys.stdout, orig_stdout) + self.assertIs(sys.stderr, orig_stderr) + else: + def fake_std(): + # Emulate stdout=not a tty, stderr=tty + # to check that we handle both cases correctly + stdout = Mock() + stdout.closed = False + stdout.isatty.return_value = False + stdout.fileno.return_value = 1 + sys.stdout = stdout + + stderr = Mock() + stderr.closed = False + stderr.isatty.return_value = True + stderr.fileno.return_value = 2 + sys.stderr = stderr + + for native_ansi in [False, True]: + with patch( + 'colorama.ansitowin32.enable_vt_processing', + lambda *_: native_ansi + ): + self._reset() + fake_std() + + # Regular single-call test + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + self.assertIs(sys.stdout, prev_stdout) + if native_ansi: + self.assertIs(sys.stderr, prev_stderr) + else: + self.assertIsNot(sys.stderr, prev_stderr) + + # second call without resetting is always a no-op + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + self.assertIs(sys.stdout, prev_stdout) + self.assertIs(sys.stderr, prev_stderr) + + self._reset() + fake_std() + + # If init() runs first, just_fix_windows_console should be a no-op + init() + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + self.assertIs(prev_stdout, sys.stdout) + self.assertIs(prev_stderr, sys.stderr) + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py 
b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py new file mode 100644 index 0000000..0f84e4b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/isatty_test.py @@ -0,0 +1,57 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import sys +from unittest import TestCase, main + +from ..ansitowin32 import StreamWrapper, AnsiToWin32 +from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY + + +def is_a_tty(stream): + return StreamWrapper(stream, None).isatty() + +class IsattyTest(TestCase): + + def test_TTY(self): + tty = StreamTTY() + self.assertTrue(is_a_tty(tty)) + with pycharm(): + self.assertTrue(is_a_tty(tty)) + + def test_nonTTY(self): + non_tty = StreamNonTTY() + self.assertFalse(is_a_tty(non_tty)) + with pycharm(): + self.assertFalse(is_a_tty(non_tty)) + + def test_withPycharm(self): + with pycharm(): + self.assertTrue(is_a_tty(sys.stderr)) + self.assertTrue(is_a_tty(sys.stdout)) + + def test_withPycharmTTYOverride(self): + tty = StreamTTY() + with pycharm(), replace_by(tty): + self.assertTrue(is_a_tty(tty)) + + def test_withPycharmNonTTYOverride(self): + non_tty = StreamNonTTY() + with pycharm(), replace_by(non_tty): + self.assertFalse(is_a_tty(non_tty)) + + def test_withPycharmNoneOverride(self): + with pycharm(): + with replace_by(None), replace_original_by(None): + self.assertFalse(is_a_tty(None)) + self.assertFalse(is_a_tty(StreamNonTTY())) + self.assertTrue(is_a_tty(StreamTTY())) + + def test_withPycharmStreamWrapped(self): + with pycharm(): + self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty()) + self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty()) + self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty()) + self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty()) + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py new file mode 100644 index 0000000..472fafb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/utils.py @@ -0,0 +1,49 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
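isatty_test.py exercises the PYCHARM_HOSTED special case in StreamWrapper.isatty() shown earlier: the override only fires when the wrapped stream is sys.__stdout__ or sys.__stderr__, never for arbitrary streams. A small sketch (stand-alone `colorama` import assumed):

```python
import os
import sys
from io import StringIO

from colorama.ansitowin32 import StreamWrapper

buf = StringIO()                           # StringIO.isatty() is False
print(StreamWrapper(buf, None).isatty())   # False

os.environ["PYCHARM_HOSTED"] = "1"
print(StreamWrapper(buf, None).isatty())             # still False: not a std stream
print(StreamWrapper(sys.__stdout__, None).isatty())  # True under the override
del os.environ["PYCHARM_HOSTED"]
```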
+from contextlib import contextmanager +from io import StringIO +import sys +import os + + +class StreamTTY(StringIO): + def isatty(self): + return True + +class StreamNonTTY(StringIO): + def isatty(self): + return False + +@contextmanager +def osname(name): + orig = os.name + os.name = name + yield + os.name = orig + +@contextmanager +def replace_by(stream): + orig_stdout = sys.stdout + orig_stderr = sys.stderr + sys.stdout = stream + sys.stderr = stream + yield + sys.stdout = orig_stdout + sys.stderr = orig_stderr + +@contextmanager +def replace_original_by(stream): + orig_stdout = sys.__stdout__ + orig_stderr = sys.__stderr__ + sys.__stdout__ = stream + sys.__stderr__ = stream + yield + sys.__stdout__ = orig_stdout + sys.__stderr__ = orig_stderr + +@contextmanager +def pycharm(): + os.environ["PYCHARM_HOSTED"] = "1" + non_tty = StreamNonTTY() + with replace_by(non_tty), replace_original_by(non_tty): + yield + del os.environ["PYCHARM_HOSTED"] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py new file mode 100644 index 0000000..d0955f9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/tests/winterm_test.py @@ -0,0 +1,131 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import sys +from unittest import TestCase, main, skipUnless + +try: + from unittest.mock import Mock, patch +except ImportError: + from mock import Mock, patch + +from ..winterm import WinColor, WinStyle, WinTerm + + +class WinTermTest(TestCase): + + @patch('colorama.winterm.win32') + def testInit(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 7 + 6 * 16 + 8 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + self.assertEqual(term._fore, 7) + self.assertEqual(term._back, 6) + self.assertEqual(term._style, 8) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testGetAttrs(self): + term = WinTerm() + + term._fore = 0 + term._back = 0 + term._style = 0 + self.assertEqual(term.get_attrs(), 0) + + term._fore = WinColor.YELLOW + self.assertEqual(term.get_attrs(), WinColor.YELLOW) + + term._back = WinColor.MAGENTA + self.assertEqual( + term.get_attrs(), + WinColor.YELLOW + WinColor.MAGENTA * 16) + + term._style = WinStyle.BRIGHT + self.assertEqual( + term.get_attrs(), + WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT) + + @patch('colorama.winterm.win32') + def testResetAll(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 1 + 2 * 16 + 8 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + + term.set_console = Mock() + term._fore = -1 + term._back = -1 + term._style = -1 + + term.reset_all() + + self.assertEqual(term._fore, 1) + self.assertEqual(term._back, 2) + self.assertEqual(term._style, 8) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testFore(self): + term = WinTerm() + term.set_console = Mock() + term._fore = 0 + + term.fore(5) + + self.assertEqual(term._fore, 5) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testBack(self): + term = WinTerm() + term.set_console = Mock() + term._back = 0 + + term.back(5) + + self.assertEqual(term._back, 5) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def 
testStyle(self): + term = WinTerm() + term.set_console = Mock() + term._style = 0 + + term.style(22) + + self.assertEqual(term._style, 22) + self.assertEqual(term.set_console.called, True) + + @patch('colorama.winterm.win32') + def testSetConsole(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 0 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + term.windll = Mock() + + term.set_console() + + self.assertEqual( + mockWin32.SetConsoleTextAttribute.call_args, + ((mockWin32.STDOUT, term.get_attrs()), {}) + ) + + @patch('colorama.winterm.win32') + def testSetConsoleOnStderr(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 0 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + term.windll = Mock() + + term.set_console(on_stderr=True) + + self.assertEqual( + mockWin32.SetConsoleTextAttribute.call_args, + ((mockWin32.STDERR, term.get_attrs()), {}) + ) + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..841b0e2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/win32.py @@ -0,0 +1,180 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. + +# from winbase.h +STDOUT = -11 +STDERR = -12 + +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + 
wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW + _SetConsoleTitleW.argtypes = [ + wintypes.LPCWSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + _GetConsoleMode = windll.kernel32.GetConsoleMode + _GetConsoleMode.argtypes = [ + wintypes.HANDLE, + POINTER(wintypes.DWORD) + ] + _GetConsoleMode.restype = wintypes.BOOL + + _SetConsoleMode = windll.kernel32.SetConsoleMode + _SetConsoleMode.argtypes = [ + wintypes.HANDLE, + wintypes.DWORD + ] + _SetConsoleMode.restype = wintypes.BOOL + + def _winapi_test(handle): + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def winapi_test(): + return any(_winapi_test(h) for h in + (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = _GetStdHandle(stream_id) + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = _GetStdHandle(stream_id) + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). + adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = _GetStdHandle(stream_id) + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = _GetStdHandle(stream_id) + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = _GetStdHandle(stream_id) + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) + + def GetConsoleMode(handle): + mode = wintypes.DWORD() + success = _GetConsoleMode(handle, byref(mode)) + if not success: + raise ctypes.WinError() + return mode.value + + def SetConsoleMode(handle, mode): + success = _SetConsoleMode(handle, mode) + if not success: + raise ctypes.WinError() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..aad867e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/colorama/winterm.py @@ -0,0 +1,195 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
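SetConsoleCursorPosition in win32.py above quietly bridges two conventions: ANSI `H` positions are 1-based and ordered (row, col), while the Win32 COORD is 0-based and ordered (X, Y). The helper below is hypothetical (not part of colorama) and just restates that mapping for reference; the real code additionally offsets by the viewport's srWindow.Top/Left, which this sketch omits:

```python
def ansi_to_win32_coord(row, col):
    """Hypothetical helper: map a 1-based ANSI (row, col) cursor position
    to the 0-based (X, Y) pair the Win32 console API expects."""
    # ANSI 'H' is 1-based, (row, col); Win32 COORD is 0-based, (X, Y).
    return (col - 1, row - 1)

assert ansi_to_win32_coord(1, 1) == (0, 0)  # top-left corner
assert ansi_to_win32_coord(5, 3) == (2, 4)  # ESC[5;3H -> COORD(X=2, Y=4)
```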
+try: + from msvcrt import get_osfhandle +except ImportError: + def get_osfhandle(_): + raise OSError("This isn't windows!") + + +from . import win32 + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. + self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + self._light = 0 + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + elif mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + elif mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) + + +def enable_vt_processing(fd): + if win32.windll is None or not win32.winapi_test(): + return False + + try: + handle = get_osfhandle(fd) + mode = win32.GetConsoleMode(handle) + win32.SetConsoleMode( + handle, + mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + + mode = win32.GetConsoleMode(handle) + if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING: + return True + # Can get TypeError in testsuite where 'fd' is a Mock() + except (OSError, TypeError): + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..e999438 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
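The enable_vt_processing() function above is the modern escape hatch: rather than emulating ANSI sequences with console API calls (what WinTerm does), it asks Windows 10+ to interpret the escape codes natively by setting ENABLE_VIRTUAL_TERMINAL_PROCESSING. A rough usage sketch, assuming the standalone colorama package rather than this vendored copy (pip._vendor is not a public import path):

    import sys

    # Hypothetical direct use; real applications would normally call
    # colorama.just_fix_windows_console() and let it decide.
    from colorama.winterm import enable_vt_processing

    if enable_vt_processing(sys.stdout.fileno()):
        # The console now understands raw ANSI escapes.
        print("\x1b[31mred\x1b[0m and \x1b[1mbold\x1b[0m")
    else:
        print("No VT support; colorama falls back to WinTerm emulation.")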
+# +import logging + +__version__ = '0.3.8' + + +class DistlibException(Exception): + pass + + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + + class NullHandler(logging.Handler): + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..698e800 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..5df4058 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-312.pyc new file mode 100644 index 0000000..59b367c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-312.pyc new file mode 100644 index 0000000..30cafae Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc new file mode 100644 index 0000000..11dedd9 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc new file mode 100644 index 0000000..d4ddf4e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc new file mode 100644 index 0000000..914a165 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc new file mode 100644 index 0000000..8bb7b54 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc new file mode 100644 index 0000000..9bc381f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc new file mode 100644 index 0000000..81ac479 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..b907565 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..149befb Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..9bf6ae6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..e93dc27 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1138 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
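Before the compat layer, note the logging convention in distlib/__init__.py above: the package logger gets a NullHandler, so importing distlib never configures logging or warns about missing handlers; output appears only if the application opts in. For example (a sketch; the vendored logger here would be named 'pip._vendor.distlib', the standalone one simply 'distlib'):

    import logging

    # Opt in to the library's debug output from application code.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('distlib').setLevel(logging.DEBUG)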
+# +from __future__ import absolute_import + +import os +import re +import shutil +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + # Leaving this around for now, in case it needs resurrecting in some way + # _userprog = None + # def splituser(host): + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') + + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote, + urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, HTTPBasicAuthHandler, + HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + + class CertificateError(ValueError): + pass + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. 
+ if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" % + (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" % + (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if os.curdir not in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if normdir not in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + + +import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
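compat.py is built almost entirely from one idiom, used above for which() and ZipFile and below for fsencode()/fsdecode(): try the modern stdlib name, catch ImportError (or AttributeError for missing os attributes), and bind a local backport, so call sites never branch on Python version. The same shape in miniature, mirroring the queue shim earlier in this file:

    try:
        import queue                # Python 3
    except ImportError:             # pragma: no cover
        import Queue as queue       # Python 2 spelling

    # Callers stay version-agnostic:
    q = queue.Queue()
    q.put('item')
    print(q.get())                  # 'item'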
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format( + filename, encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format( + filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + + +# For converting & <-> &amp; etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, + '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' + A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + be accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used.
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[ + key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__( + key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union( + *self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover + # {{{ http://code.activestate.com/recipes/576693/ (r9) + # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. + # Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % + len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
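The comments that follow describe the backport's core data structure: insertion order lives in a circular doubly linked list of [PREV, NEXT, KEY] cells rooted at a sentinel, while the inherited dict keeps doing the hashing. A toy standalone illustration of that list (not distlib code):

    # Sentinel-rooted circular linked list, as in the backport below.
    root = []                      # [PREV, NEXT, KEY]
    root[:] = [root, root, None]   # empty list: sentinel points at itself
    node_map = {}

    def link_last(key):
        # Append at the end of the order, like __setitem__ does.
        last = root[0]
        last[1] = root[0] = node_map[key] = [last, root, key]

    for k in 'abc':
        link_last(k)

    # Follow NEXT pointers from the sentinel to recover insertion order.
    curr, order = root[1], []
    while curr is not root:
        order.append(curr[2])
        curr = curr[1]
    print(order)                   # ['a', 'b', 'c']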
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args), )) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: + _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__, ) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items, ), inst_dict) + return self.__class__, (items, ) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self) == len( + other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext': 'ext_convert', + 'cfg': 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int( + idx + ) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + # rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance( + value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance( + value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..eb3765f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/database.py @@ -0,0 +1,1359 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + +__all__ = [ + 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', + 'EggInfoDistribution', 'DistributionPath' +] + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). 
+ """ + + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + try: + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [ + METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME + ] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join( + entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, + scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, + metadata=metadata, + env=self) + elif self._include_egg and entry.endswith( + ('.egg-info', '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + except Exception as e: + msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s' + logger.warning(msg, r.path, e) + import warnings + warnings.warn(msg % (r.path, e), stacklevel=2) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. 
+ :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. + + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. 
+ """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + reqts = getattr(md, req_attr) + logger.debug('%s: got requirements %r from metadata: %r', self.name, + req_attr, reqts) + return set( + md.get_requirements(reqts, extras=self.extras, env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. 
+ """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and self.version == other.version + and self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. 
It reads the metadata contained in ``pydist.json`` when it is + instantiated, or uses a passed-in Metadata instance (useful when + dry-run mode is being used). + """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find(LEGACY_METADATA_FILENAME) + if r is None: + raise ValueError('no %s found in %s' % + (METADATA_FILENAME, path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read().decode('utf-8') + self.modules = data.splitlines() + + def __repr__(self): + return '<InstalledDistribution %r %s at %r>' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + # base_location = os.path.dirname(self.path) + # base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + # if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries.
+ """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. + + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix + and path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. 
+ """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append( + (path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. 
+ + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. + + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) + and self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. 
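+
+            For example (illustrative), given file contents of the form::
+
+                foo >= 1.0
+                bar
+                [extra]
+                baz
+
+            the returned list would be ``['foo (>=1.0)', 'bar']`` - the
+            section header ends the scan, so ``baz`` is not included.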
+ """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + # sectioned files have bare newlines (separating sections) + if not line: # pragma: no cover + continue + if line.startswith('['): # pragma: no cover + logger.warning( + 'Unexpected line: quitting requirement scan: %r', line) + break + r = parse_requirement(line) + if not r: # pragma: no cover + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: # pragma: no cover + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. + """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode( + 'utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '' % (self.name, self.version, + self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. 
+ """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + # otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) + and self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. 
+ + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + # self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. + + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if label is not None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
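+                # Every node still present has at least one outgoing edge,
+                # so nothing further can be emitted in a valid topological
+                # order; the remaining keys are returned to the caller as
+                # the cycle members.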
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires + | dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + in finding the dependencies. 
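+
+    A usage sketch (assumes ``dists`` already contains ``dist``)::
+
+        >>> from distlib.database import DistributionPath, get_required_dists
+        >>> dists = list(DistributionPath().get_distributions())  # doctest: +SKIP
+        >>> required = get_required_dists(dists, dists[0])  # doctest: +SKIP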
+ """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = set() # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + seen = set(t[0] for t in todo) # already added to todo + + while todo: + d = todo.pop()[0] + req.add(d) + pred_list = graph.adjacency_list[d] + for pred in pred_list: + d = pred[0] + if d not in req and d not in seen: + seen.add(d) + todo.append(pred) + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. + """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..56cd286 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/index.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: # pragma: no cover + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from .util import _get_pypirc_command as cmd + return cmd() + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils. This populates + ``username``, ``password``, ``realm`` and ``url`` attributes from the + configuration. 
+ """ + from .util import _load_pypirc + cfg = _load_pypirc(self) + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + """ + self.check_credentials() + from .util import _store_pypirc + _store_pypirc(self) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. + """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): # pragma: no cover + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. 
+ :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. + """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
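+
+        A hedged invocation sketch (metadata object and paths illustrative)::
+
+            >>> index = PackageIndex()  # doctest: +SKIP
+            >>> index.username, index.password = 'user', 'secret'  # doctest: +SKIP
+            >>> index.upload_file(md, 'dist/foo-1.0.tar.gz',
+            ...                   filetype='sdist', pyversion='source')  # doctest: +SKIP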
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): # pragma: no cover + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): # pragma: no cover + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..f9f0788 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/locators.py @@ -0,0 +1,1303 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, ensure_slash, split_filename, get_project_data, + parse_requirement, parse_name_and_version, ServerProxy, + normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. 
+ def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
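+
+        A usage sketch (locator class and project name illustrative)::
+
+            >>> locator = SimpleScrapingLocator('https://pypi.org/simple/')  # doctest: +SKIP
+            >>> versions = locator.get_project('requests')  # doctest: +SKIP
+            >>> [v for v in versions if v not in ('urls', 'digests')]  # doctest: +SKIP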
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + pass # logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. + """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + # urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
+            scheme, netloc, path, params, query, frag = urlparse(url)
+            return urlunparse((scheme, netloc, quote(path),
+                               params, query, frag))
+
+        result = set()
+        for match in self._href.finditer(self.data):
+            d = match.groupdict('')
+            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
+                   d['rel4'] or d['rel5'] or d['rel6'])
+            url = d['url1'] or d['url2'] or d['url3']
+            url = urljoin(self.base_url, url)
+            url = unescape(url)
+            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
+            result.add((url, rel))
+        # We sort the result, hoping to bring the most recent versions
+        # to the front
+        result = sorted(result, key=lambda t: t[0], reverse=True)
+        return result
+
+
+class SimpleScrapingLocator(Locator):
+    """
+    A locator which scrapes HTML pages to locate downloads for a distribution.
+    This runs multiple threads to do the I/O; performance is at least as good
+    as pip's PackageFinder, which works in an analogous fashion.
+    """
+
+    # These are used to deal with various Content-Encoding schemes.
+    decoders = {
+        'deflate': zlib.decompress,
+        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
+        'none': lambda b: b,
+    }
+
+    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
+        """
+        Initialise an instance.
+        :param url: The root URL to use for scraping.
+        :param timeout: The timeout, in seconds, to be applied to requests.
+                        This defaults to ``None`` (no timeout specified).
+        :param num_workers: The number of worker threads you want to do I/O.
+                            This defaults to 10.
+        :param kwargs: Passed to the superclass.
+        """
+        super(SimpleScrapingLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+        self.timeout = timeout
+        self._page_cache = {}
+        self._seen = set()
+        self._to_fetch = queue.Queue()
+        self._bad_hosts = set()
+        self.skip_externals = False
+        self.num_workers = num_workers
+        self._lock = threading.RLock()
+        # See issue #45: we need to be resilient when the locator is used
+        # in a thread, e.g. with concurrent.futures. We can't use self._lock
+        # as it is for coordinating our internal threads - the ones created
+        # in _prepare_threads.
+        self._gplock = threading.RLock()
+        self.platform_check = False  # See issue #112
+
+    def _prepare_threads(self):
+        """
+        Threads are created only when get_project is called, and terminate
+        before it returns. They are there primarily to parallelise I/O (i.e.
+        fetching web pages).
+        """
+        self._threads = []
+        for i in range(self.num_workers):
+            t = threading.Thread(target=self._fetch)
+            t.daemon = True
+            t.start()
+            self._threads.append(t)
+
+    def _wait_threads(self):
+        """
+        Tell all the threads to terminate (by sending a sentinel value) and
+        wait for them to do so.
+        """
+        # Note that you need two loops, since you can't say which
+        # thread will get each sentinel
+        for t in self._threads:
+            self._to_fetch.put(None)    # sentinel
+        for t in self._threads:
+            t.join()
+        self._threads = []
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        with self._gplock:
+            self.result = result
+            self.project_name = name
+            url = urljoin(self.base_url, '%s/' % quote(name))
+            self._seen.clear()
+            self._page_cache.clear()
+            self._prepare_threads()
+            try:
+                logger.debug('Queueing %s', url)
+                self._to_fetch.put(url)
+                self._to_fetch.join()
+            finally:
+                self._wait_threads()
+            del self.result
+        return result
+
+    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)
+
+    def _is_platform_dependent(self, url):
+        """
+        Does an URL refer to a platform-specific download?
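+
+        For example, a wheel URL containing a fragment such as ``win_amd64``
+        or ``linux_x86_64`` matches the pattern above, while a hypothetical
+        pure-Python archive like ``.../foo-1.0.tar.gz`` does not.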
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + # logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 440. +default_locator = AggregatingLocator( + # JSONLocator(), # don't use as PEP 426 is withdrawn + SimpleScrapingLocator('https://pypi.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + # import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..420dcf1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/manifest.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +""" +Class representing the list of files in a distribution. + +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + + +class Manifest(object): + """ + A list of files built by exploring the filesystem and filtered by applying various + patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. 
Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dirpattern).
+        action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=True):
+                    logger.warning('no files found matching %r', pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, anchor=True)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=False):
+                    logger.warning('no files found matching %r '
+                                   'anywhere in distribution', pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, anchor=False)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=thedir):
+                    logger.warning('no files found matching %r '
+                                   'under directory %r', pattern, thedir)
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                self._exclude_pattern(pattern, prefix=thedir)
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dirpattern):
+                logger.warning('no directories found matching %r',
+                               dirpattern)
+
+        elif action == 'prune':
+            if not self._exclude_pattern(None, prefix=dirpattern):
+                logger.warning('no previously-included directories found '
+                               'matching %r', dirpattern)
+        else:  # pragma: no cover
+            # This should never happen, as it should be caught in
+            # _parse_template_line
+            raise DistlibException(
+                'invalid action %r' % action)
+
+    #
+    # Private API
+    #
+
+    def _parse_directive(self, directive):
+        """
+        Validate a directive.
+        :param directive: The directive to validate.
+        :return: A tuple of action, patterns, thedir, dir_patterns
+        """
+        words = directive.split()
+        if len(words) == 1 and words[0] not in ('include', 'exclude',
+                                                'global-include',
+                                                'global-exclude',
+                                                'recursive-include',
+                                                'recursive-exclude',
+                                                'graft', 'prune'):
+            # no action given, let's use the default 'include'
+            words.insert(0, 'include')
+
+        action = words[0]
+        patterns = thedir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistlibException(
+                    '%r expects <pattern1> <pattern2> ...' % action)
+
+            patterns = [convert_path(word) for word in words[1:]]
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistlibException(
+                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+            thedir = convert_path(words[1])
+            patterns = [convert_path(word) for word in words[2:]]
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistlibException(
+                    '%r expects a single <dir_pattern>' % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistlibException('unknown action %r' % action)
+
+        return action, patterns, thedir, dir_pattern
+
+    def _include_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?' match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+ + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). + """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. 
Differs from
+        'fnmatch.translate()' in that '*' does not match "special characters"
+        (which are platform-specific).
+        """
+        pattern_re = fnmatch.translate(pattern)
+
+        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+        # and by extension they shouldn't match such "special characters" under
+        # any OS. So change all non-escaped dots in the RE to match any
+        # character except the special characters (currently: just os.sep).
+        sep = os.sep
+        if os.sep == '\\':
+            # we're using a regex to manipulate a regex, so we need
+            # to escape the backslash twice
+            sep = r'\\\\'
+        escaped = r'\1[^%s]' % sep
+        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+        return pattern_re
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/markers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/markers.py
new file mode 100644
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/markers.py
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2023 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+# Note: In PEP 345, the micro-language was Python compatible, so the ast
+# module could be used to parse it. However, PEP 508 introduced operators such
+# as ~= and === which aren't in Python, necessitating a different approach.
+
+import os
+import re
+import sys
+import platform
+
+from .compat import string_types
+from .util import in_venv, parse_marker
+from .version import LegacyVersion as LV
+
+__all__ = ['interpret']
+
+_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
+_VERSION_MARKERS = {'python_version', 'python_full_version'}
+
+
+def _is_version_marker(s):
+    return isinstance(s, string_types) and s in _VERSION_MARKERS
+
+
+def _is_literal(o):
+    if not isinstance(o, string_types) or not o:
+        return False
+    return o[0] in '\'"'
+
+
+def _get_versions(s):
+    return {LV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)}
+
+
+class Evaluator(object):
+    """
+    This class is used to evaluate marker expressions.
+    """
+
+    operations = {
+        '==': lambda x, y: x == y,
+        '===': lambda x, y: x == y,
+        '~=': lambda x, y: x == y or x > y,
+        '!=': lambda x, y: x != y,
+        '<': lambda x, y: x < y,
+        '<=': lambda x, y: x == y or x < y,
+        '>': lambda x, y: x > y,
+        '>=': lambda x, y: x == y or x > y,
+        'and': lambda x, y: x and y,
+        'or': lambda x, y: x or y,
+        'in': lambda x, y: x in y,
+        'not in': lambda x, y: x not in y,
+    }
+
+    def evaluate(self, expr, context):
+        """
+        Evaluate a marker expression returned by the :func:`parse_requirement`
+        function in the specified context.
+        """
+        if isinstance(expr, string_types):
+            if expr[0] in '\'"':
+                result = expr[1:-1]
+            else:
+                if expr not in context:
+                    raise SyntaxError('unknown variable: %s' % expr)
+                result = context[expr]
+        else:
+            assert isinstance(expr, dict)
+            op = expr['op']
+            if op not in self.operations:
+                raise NotImplementedError('op not implemented: %s' % op)
+            elhs = expr['lhs']
+            erhs = expr['rhs']
+            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+                raise SyntaxError('invalid comparison: %s %s %s' %
+                                  (elhs, op, erhs))
+
+            lhs = self.evaluate(elhs, context)
+            rhs = self.evaluate(erhs, context)
+            if ((_is_version_marker(elhs) or _is_version_marker(erhs))
+                    and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
+                lhs = LV(lhs)
+                rhs = LV(rhs)
+            elif _is_version_marker(elhs) and op in ('in', 'not in'):
+                lhs = LV(lhs)
+                rhs = _get_versions(rhs)
+            result = self.operations[op](lhs, rhs)
+        return result
+
+
+_DIGITS = re.compile(r'\d+\.\d+')
+
+
+def default_context():
+
+    def format_full_version(info):
+        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+        kind = info.releaselevel
+        if kind != 'final':
+            version += kind[0] + str(info.serial)
+        return version
+
+    if hasattr(sys, 'implementation'):
+        implementation_version = format_full_version(
+            sys.implementation.version)
+        implementation_name = sys.implementation.name
+    else:
+        implementation_version = '0'
+        implementation_name = ''
+
+    ppv = platform.python_version()
+    m = _DIGITS.match(ppv)
+    pv = m.group(0)
+    result = {
+        'implementation_name': implementation_name,
+        'implementation_version': implementation_version,
+        'os_name': os.name,
+        'platform_machine': platform.machine(),
+        'platform_python_implementation': platform.python_implementation(),
+        'platform_release': platform.release(),
+        'platform_system': platform.system(),
+        'platform_version': platform.version(),
+        'platform_in_venv': str(in_venv()),
+        'python_full_version': ppv,
+        'python_version': pv,
+        'sys_platform': sys.platform,
+    }
+    return result
+
+
+DEFAULT_CONTEXT = default_context()
+del default_context
+
+evaluator = Evaluator()
+
+
+def interpret(marker, execution_context=None):
+    """
+    Interpret a marker and return a result depending on environment.
+
+    :param marker: The marker to interpret.
+    :type marker: str
+    :param execution_context: The context used for name lookup.
+ :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % + (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % + (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..7189aee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1068 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. 
Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +# Ditto for Obsoletes - see issue #140. 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides', 'Obsoletes') + +_566_MARKERS = ('Description-Content-Type',) + +_643_MARKERS = ('Dynamic', 'License-File') + +_643_FIELDS = _566_FIELDS + _643_MARKERS + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) +_ALL_FIELDS.update(_643_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + # avoid adding field names if already there + return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + elif version == '2.0': + raise ValueError('Metadata 2.0 is withdrawn and not supported') + # return _426_FIELDS + elif version == '2.2': + return _643_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + return any(marker in keys for marker in markers) + + keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)] + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2'] # 2.0 removed + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _643_FIELDS and '2.2' in possible_versions: + possible_versions.remove('2.2') + logger.debug('Removed 2.2 due to %s', key) + # if key not in _426_FIELDS and '2.0' in possible_versions: + # possible_versions.remove('2.0') + # logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields') + + # we have the choice, 1.0, or 1.2, 2.1 or 2.2 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.1 adds more features + # - 2.2 is the latest + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + # if is_2_2: + # return '2.2' + + return '2.2' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension', 'License-File') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
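+
+        For example (hypothetical field values), ``Name: foo`` with
+        ``Version: 1.0b1`` yields ``'foo-1.0b1'``.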
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
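+
+        For example (hypothetical values)::
+
+            metadata.update({'name': 'foo', 'version': '1.0', 'bogus': 'x'})
+            # 'bogus' is not a known metadata field, so it is dropped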
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.1 + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
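+
+    A usage sketch (hypothetical file path; a legacy METADATA file is
+    auto-detected and wrapped)::
+
+        md = Metadata(path='/tmp/example/METADATA')
+        print(md.name, md.version)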
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + 'dynamic': (FIELDNAME_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..fef52aa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. 
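+# --- Editor's illustration (not part of the vendored source) ---------------
+# Rounding off metadata.py above: a minimal end-to-end use of Metadata.
+# The name, version and requirement string here are hypothetical.
+#
+#     md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
+#                            'version': '1.0', 'summary': 'A demo package'})
+#     md.add_requirements(['requests (>= 2.0)'])
+#     md.write('pydist.json')              # JSON form
+#     md.write('METADATA', legacy=True)    # key-value form via _to_legacy()
+# ----------------------------------------------------------------------------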
+# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
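+
+    A usage sketch for the cache-backed Resource objects above (editor's
+    illustration; the package and resource names are hypothetical)::
+
+        r = finder('mypkg').find('data/config.json')
+        if r is not None and not r.is_container:
+            payload = r.bytes            # whole payload via the finder
+            with r.as_stream() as s:     # a fresh stream on each call
+                head = s.read(64)
+            p = r.file_path              # materialised through ResourceCache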
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + # See issue #146 + _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. 
+ """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..cfa45d2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys +import time +from zipfile import ZipInfo + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. 
+ """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, + source_dir, + target_dir, + add_launchers=True, + dry_run=False, + fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' + and os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or (os.name == 'java' + and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) + and (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' 
+ executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join( + sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + if os.name == 'nt': + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + else: + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable from utf-8' % + shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
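+        # Editor's aside (illustration): when the simple form is not usable
+        # (an over-long POSIX interpreter path), _build_shebang above emits
+        # a /bin/sh trampoline of the form
+        #
+        #   #!/bin/sh
+        #   '''exec' /very/long/path/to/python "$0" "$@"
+        #   ' '''
+        #
+        # sh collapses the quotes on the second line and exec's the
+        # interpreter, while Python parses the same two lines as a harmless
+        # string literal.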
+ if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % + (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict( + module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') + if source_date_epoch: + date_time = time.gmtime(int(source_date_epoch))[:6] + zinfo = ZipInfo(filename='__main__.py', + date_time=date_time) + zf.writestr(zinfo, script_bytes) + else: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith( + '.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % + (name, self.variant_separator, self.version_info[0], + self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' + and os._name == 'nt'): # pragma: no cover + # Executable launcher support. 
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' + name = '%s%s%s.exe' % (kind, bits, platform_suffix) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % + (name, distlib_package)) + raise ValueError(msg) + return resource.bytes + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. + + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..ba58858 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,2025 @@ +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + HTTPHandler, BaseConfigurator, valid_ident, + Container, configparser, URLError, ZipFile, fsdecode, + unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. 
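+    For example (editor's illustration)::
+
+        parse_marker('python_version >= "3.8" and os_name == "posix"')
+        # -> ({'op': 'and',
+        #      'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.8"'},
+        #      'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}},
+        #     '')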
+ + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). + """ + + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % + remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. 
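+
+    For example (editor's illustration)::
+
+        r = parse_requirement('foo[bar] >= 1.0, < 2.0; python_version >= "3.8"')
+        # r.name        -> 'foo'
+        # r.extras      -> ['bar']
+        # r.constraints -> [('>=', '1.0'), ('<', '2.0')]
+        # r.marker      -> {'op': '>=', 'lhs': 'python_version',
+        #                   'rhs': '"3.8"'}
+        # r.requirement -> 'foo >= 1.0, < 2.0'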
+ """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % + ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + # Some packages have a trailing comma which would break things + # See issue #148 + if not ver_remaining: + break + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % + ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % + remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join( + ['%s %s' % con for con in versions])) + return Container(name=distname, + extras=extras, + constraints=versions, + marker=mark_expr, + url=uri, + requirement=rs) + + +def 
get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): + # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as + # changes to the stub launcher mean that sys.executable always points + # to the stub on OS X + # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' + # in os.environ): + # result = os.environ['__PYVENV_LAUNCHER__'] + # else: + # result = sys.executable + # return result + # Avoid normcasing: see issue #143 + # result = os.path.normcase(sys.executable) + result = sys.executable + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + # entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + 
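+        # Editor's note (illustration): an exports map such as
+        #   {'console_scripts':
+        #       {'foo': get_export_entry('foo = mypkg.cli:main')}}
+        # comes out below in INI form, roughly:
+        #   [console_scripts]
+        #   foo = mypkg.cli:main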
cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + + def __init__(self, func): + self.func = func + # for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + # obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. 
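+
+        A sketch of the record/commit cycle around this method (editor's
+        illustration; the paths are hypothetical)::
+
+            op = FileOperator()
+            op.record = True
+            op.copy_file('src/foo.txt', 'build/foo.txt')
+            files, dirs = op.commit()   # sets of paths written/created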
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, + path, + optimize=False, + force=False, + prefix=None, + hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, + 'PycInvalidationMode'): + compile_kwargs[ + 'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, + **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + 
+        Commit recorded changes, turn off recording, return
+        changes.
+        """
+        assert self.record
+        result = self.files_written, self.dirs_created
+        self._init_record()
+        return result
+
+    def rollback(self):
+        if not self.dry_run:
+            for f in list(self.files_written):
+                if os.path.exists(f):
+                    os.remove(f)
+            # dirs should all be empty now, except perhaps for
+            # __pycache__ subdirs
+            # reverse so that subdirs appear before their parents
+            dirs = sorted(self.dirs_created, reverse=True)
+            for d in dirs:
+                flist = os.listdir(d)
+                if flist:
+                    assert flist == ['__pycache__']
+                    sd = os.path.join(d, flist[0])
+                    os.rmdir(sd)
+                os.rmdir(d)  # should fail if non-empty
+        self._init_record()
+
+
+def resolve(module_name, dotted_path):
+    if module_name in sys.modules:
+        mod = sys.modules[module_name]
+    else:
+        mod = __import__(module_name)
+    if dotted_path is None:
+        result = mod
+    else:
+        parts = dotted_path.split('.')
+        result = getattr(mod, parts.pop(0))
+        for p in parts:
+            result = getattr(result, p)
+    return result
+
+
+class ExportEntry(object):
+
+    def __init__(self, name, prefix, suffix, flags):
+        self.name = name
+        self.prefix = prefix
+        self.suffix = suffix
+        self.flags = flags
+
+    @cached_property
+    def value(self):
+        return resolve(self.prefix, self.suffix)
+
+    def __repr__(self):  # pragma: no cover
+        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+                                                self.suffix, self.flags)
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportEntry):
+            result = False
+        else:
+            result = (self.name == other.name and self.prefix == other.prefix
+                      and self.suffix == other.suffix
+                      and self.flags == other.flags)
+        return result
+
+    __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(
+    r'''(?P<name>([^\[]\S*))
+    \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+    \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+    ''', re.VERBOSE)
+
+
+def get_export_entry(specification):
+    m = ENTRY_RE.search(specification)
+    if not m:
+        result = None
+        if '[' in specification or ']' in specification:
+            raise DistlibException("Invalid specification "
+                                   "'%s'" % specification)
+    else:
+        d = m.groupdict()
+        name = d['name']
+        path = d['callable']
+        colons = path.count(':')
+        if colons == 0:
+            prefix, suffix = path, None
+        else:
+            if colons != 1:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            prefix, suffix = path.split(':')
+        flags = d['flags']
+        if flags is None:
+            if '[' in specification or ']' in specification:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            flags = []
+        else:
+            flags = [f.strip() for f in flags.split(',')]
+        result = ExportEntry(name, prefix, suffix, flags)
+    return result
+
+
+def get_cache_base(suffix=None):
+    """
+    Return the default base location for distlib caches. If the directory does
+    not exist, it is created. Use the suffix provided for the base directory,
+    and default to '.distlib' if it isn't provided.
+
+    On Windows, if LOCALAPPDATA is defined in the environment, then it is
+    assumed to be a directory, and will be the parent directory of the result.
+    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+    directory - using os.expanduser('~') - will be the parent directory of
+    the result.
+
+    The result is just the directory '.distlib' in the parent directory as
+    determined above, or with the name specified with ``suffix``.
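+
+    Editor's illustration::
+
+        get_cache_base()             # e.g. '/home/user/.distlib'
+        get_cache_base('.mycache')   # same parent, '.mycache' leaf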
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + + +PROJECT_NAME_AND_VERSION = re.compile( + '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result + + +# +# Extended metadata functionality +# + + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + # data = reader.read().decode('utf-8') + # result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, + args, kwargs, result) + return result + + +# +# Simple sequencing +# +class Sequencer(object): + + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs + or step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node], index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: + break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', + '.whl') + + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + + # Limit extraction of dangerous items, if this Python + # allows it easily. If not, just trust the input. + # See: https://docs.python.org/3/library/tarfile.html#extraction-filters + def extraction_filter(member, path): + """Run tarfile.tar_filter, but raise the expected ValueError""" + # This is only called if the current Python has tarfile filters + try: + return tarfile.tar_filter(member, path) + except tarfile.FilterError as exc: + raise ValueError(str(exc)) + + archive.extraction_filter = extraction_filter + + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G', 'T', 'P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
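+            # NB: 'and' binds tighter than 'or' in the condition above, so it
+            # reads ((duration <= 0) and self.max is None) or (cur == min).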
+ # elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + # import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + # import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
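+        # e.g. '**/*.py' splits into prefix '' (treated as '.' here) and
+        # radical '/*.py'; leading separators are stripped off the radical
+        # below before the tree is walked.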
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + # + # HTTPSConnection which verifies certificates/matches domains + # + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), + self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if getattr(self, 'cert_file', None): + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError( + 'Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
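+    # A hypothetical illustration (not part of distlib): an opener built with
+    #     build_opener(HTTPSOnlyHandler(ca_certs=None))
+    # will raise URLError for any http:// URL it is asked to open, while
+    # https:// URLs are handled normally.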
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + + def http_open(self, req): + raise URLError( + 'Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + + +# +# XML-RPC with timeouts +# +class Transport(xmlrpclib.Transport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] + + +if ssl: + + class SafeTransport(xmlrpclib.SafeTransport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection( + h, None, **kwargs) + return self._connection[1] + + +class ServerProxy(xmlrpclib.ServerProxy): + + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + # scheme = splittype(uri) # deprecated as of Python 3.8 + scheme = urlparse(uri)[0] + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + + +class CSVWriter(CSVBase): + + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + + +# +# Configurator functionality +# + + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
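+        The progress callable, if given, is invoked as ``progress(line, context)``
+        for each line read; lines are passed as bytes, as read from the stream.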
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() + + +# def _get_pypirc_command(): +# """ +# Get the distutils command for interacting with PyPI configurations. +# :return: the command. +# """ +# from distutils.core import Distribution +# from distutils.config import PyPIRCCommand +# d = Distribution() +# return PyPIRCCommand(d) + + +class PyPIRCFile(object): + + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + + def __init__(self, fn=None, url=None): + if fn is None: + fn = os.path.join(os.path.expanduser('~'), '.pypirc') + self.filename = fn + self.url = url + + def read(self): + result = {} + + if os.path.exists(self.filename): + repository = self.url or self.DEFAULT_REPOSITORY + + config = configparser.RawConfigParser() + config.read(self.filename) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [ + server.strip() for server in index_servers.split('\n') + if server.strip() != '' + ] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + for server in _servers: + result = {'server': server} + result['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', + self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + result[key] = config.get(server, key) + else: + result[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and repository + in (self.DEFAULT_REPOSITORY, 'pypi')): + result['repository'] = self.DEFAULT_REPOSITORY + elif (result['server'] != repository + and result['repository'] != repository): + result = {} + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + result = { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM + } + return result + + def update(self, username, password): + # import pdb; pdb.set_trace() + config = configparser.RawConfigParser() + fn = self.filename + config.read(fn) + if not config.has_section('pypi'): + config.add_section('pypi') + config.set('pypi', 'username', username) + config.set('pypi', 'password', password) + with open(fn, 'w') 
as f:
+            config.write(f)
+
+
+def _load_pypirc(index):
+    """
+    Read the PyPI access configuration as supported by distutils.
+    """
+    return PyPIRCFile(url=index.url).read()
+
+
+def _store_pypirc(index):
+    PyPIRCFile().update(index.username, index.password)
+
+
+#
+# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
+# tweaks
+#
+
+
+def get_host_platform():
+    """Return a string that identifies the current platform. This is used mainly to
+    distinguish platform-specific build directories and platform-specific built
+    distributions. Typically includes the OS name and version and the
+    architecture (as supplied by 'os.uname()'), although the exact information
+    included depends on the OS; e.g., on Linux, the kernel version isn't
+    particularly important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc))
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+
+    """
+    if os.name == 'nt':
+        if 'amd64' in sys.version.lower():
+            return 'win-amd64'
+        if '(arm)' in sys.version.lower():
+            return 'win-arm32'
+        if '(arm64)' in sys.version.lower():
+            return 'win-arm64'
+        return sys.platform
+
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    if os.name != 'posix' or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+
+    (osname, host, release, version, machine) = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters, and translate
+    # spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_').replace('/', '-')
+
+    if osname[:5] == 'linux':
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+
+    elif osname[:5] == 'sunos':
+        if release[0] >= '5':  # SunOS 5 == Solaris 2
+            osname = 'solaris'
+            release = '%d.%s' % (int(release[0]) - 3, release[2:])
+            # We can't use 'platform.architecture()[0]' because of a
+            # bootstrap problem. We use a dict to get an error
+            # if something suspicious happens.
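+            # (sys.maxsize is 2**31 - 1 on a 32-bit build and 2**63 - 1 on a
+            # 64-bit build, hence the two keys in the dict below.)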
+ bitness = {2147483647: '32bit', 9223372036854775807: '64bit'} + machine += '.%s' % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == 'aix': + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == 'cygwin': + osname = 'cygwin' + rel_re = re.compile(r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == 'darwin': + import _osx_support + try: + from distutils import sysconfig + except ImportError: + import sysconfig + osname, release, machine = _osx_support.get_platform_osx( + sysconfig.get_config_vars(), osname, release, machine) + + return '%s-%s-%s' % (osname, release, machine) + + +_TARGET_TO_PLAT = { + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', +} + + +def get_platform(): + if os.name != 'nt': + return get_host_platform() + cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') + if cross_compilation_target not in _TARGET_TO_PLAT: + return get_host_platform() + return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..14171ac --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/version.py @@ -0,0 +1,751 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda 
v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?' + r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I) + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0][:-1]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + if pre[1] is None: + pre = pre[0], 0 + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + if post[1] is None: + post = post[0], 0 + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + if dev[1] is None: + dev = dev[0], 0 + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
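+            # (_parts[-1] is the parsed local-version segment: an empty tuple
+            # when no '+local' suffix is present, so 'not constraint._parts[-1]'
+            # is True exactly when the constraint has no local component.)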
+            strip_local = not constraint._parts[-1] and version._parts[-1]
+        if strip_local:
+            s = version._string.split('+', 1)[0]
+            version = self.version_class(s)
+        return version, constraint
+
+    def _match_lt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version >= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_gt(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version <= constraint:
+            return False
+        release_clause = constraint._release_clause
+        pfx = '.'.join([str(i) for i in release_clause])
+        return not _match_prefix(version, pfx)
+
+    def _match_le(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version <= constraint
+
+    def _match_ge(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        return version >= constraint
+
+    def _match_eq(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version == constraint)
+        else:
+            result = _match_prefix(version, constraint)
+        return result
+
+    def _match_arbitrary(self, version, constraint, prefix):
+        return str(version) == str(constraint)
+
+    def _match_ne(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if not prefix:
+            result = (version != constraint)
+        else:
+            result = not _match_prefix(version, constraint)
+        return result
+
+    def _match_compatible(self, version, constraint, prefix):
+        version, constraint = self._adjust_local(version, constraint, prefix)
+        if version == constraint:
+            return True
+        if version < constraint:
+            return False
+#        if not prefix:
+#            return True
+        release_clause = constraint._release_clause
+        if len(release_clause) > 1:
+            release_clause = release_clause[:-1]
+        pfx = '.'.join([str(i) for i in release_clause])
+        return _match_prefix(version, pfx)
+
+
+_REPLACEMENTS = (
+    (re.compile('[.+-]$'), ''),  # remove trailing puncts
+    (re.compile(r'^[.](\d)'), r'0.\1'),  # .N -> 0.N at start
+    (re.compile('^[.-]'), ''),  # remove leading puncts
+    (re.compile(r'^\((.*)\)$'), r'\1'),  # remove parentheses
+    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),  # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),  # remove leading r(ev)
+    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
+    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),  # misspelt alpha
+    (re.compile(r'\b(pre-alpha|prealpha)\b'),
+     'pre.alpha'),  # standardise
+    (re.compile(r'\(beta\)$'), 'beta'),  # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+    (re.compile('^[:~._+-]+'), ''),  # remove leading puncts
+    (re.compile('[,*")([\\]]'), ''),  # remove unwanted chars
+    (re.compile('[~:+_ -]'), '.'),  # replace illegal chars
+    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
+    (re.compile(r'\.$'), ''),  # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+    """
+    Try to suggest a semantic form for a version for which
+    _suggest_normalized_version couldn't come up with anything.
+    """
+    result = s.strip().lower()
+    for pat, repl in _REPLACEMENTS:
+        result = pat.sub(repl, result)
+    if not result:
+        result = '0.0.0'
+
+    # Now look for numeric prefix, and separate it out from
+    # the rest.
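+    # e.g. '1.0' yields the numeric prefix [1, 0], which is padded to the
+    # three-component form '1.0.0' below, with no suffix left over.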
+    # import pdb; pdb.set_trace()
+    m = _NUMERIC_PREFIX.match(result)
+    if not m:
+        prefix = '0.0.0'
+        suffix = result
+    else:
+        prefix = m.groups()[0].split('.')
+        prefix = [int(i) for i in prefix]
+        while len(prefix) < 3:
+            prefix.append(0)
+        if len(prefix) == 3:
+            suffix = result[m.end():]
+        else:
+            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+            prefix = prefix[:3]
+        prefix = '.'.join([str(i) for i in prefix])
+        suffix = suffix.strip()
+    if suffix:
+        # import pdb; pdb.set_trace()
+        # massage the suffix.
+        for pat, repl in _SUFFIX_REPLACEMENTS:
+            suffix = pat.sub(repl, suffix)
+
+    if not suffix:
+        result = prefix
+    else:
+        sep = '-' if 'dev' in suffix else '+'
+        result = prefix + sep + suffix
+    if not is_semver(result):
+        result = None
+    return result
+
+
+def _suggest_normalized_version(s):
+    """Suggest a normalized version close to the given version string.
+
+    If you have a version string that isn't rational (i.e. NormalizedVersion
+    doesn't like it) then you might be able to get an equivalent (or close)
+    rational version from this function.
+
+    This does a number of simple normalizations to the given string, based
+    on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, 4287 of them:
+    - 2312 (53.93%) match NormalizedVersion without change
+      with the automatic suggestion
+    - 3474 (81.04%) match when using this suggestion method
+
+    @param s {str} An irrational version string.
+    @returns A rational version string, or None, if one couldn't be determined.
+    """
+    try:
+        _normalized_key(s)
+        return s  # already rational
+    except UnsupportedVersionError:
+        pass
+
+    rs = s.lower()
+
+    # part of this could use maketrans
+    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+                       ('-pre', 'c'),
+                       ('-release', ''), ('.release', ''), ('-stable', ''),
+                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+                       ('final', '')):
+        rs = rs.replace(orig, repl)
+
+    # if something ends with dev or pre, we add a 0
+    rs = re.sub(r"pre$", r"pre0", rs)
+    rs = re.sub(r"dev$", r"dev0", rs)
+
+    # if we have something like "b-2" or "a.2" at the end of the
+    # version, that is probably beta, alpha, etc
+    # let's remove the dash or dot
+    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+    # 1.0-dev-r371 -> 1.0.dev371
+    # 0.1-dev-r79 -> 0.1.dev79
+    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+    # Clean: v0.3, v1.0
+    if rs.startswith('v'):
+        rs = rs[1:]
+
+    # Clean leading '0's on numbers.
+    # TODO: unintended side-effect on, e.g., "2003.05.09"
+    # PyPI stats: 77 (~2%) better
+    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+    # zero.
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + # See issue #140. Be tolerant of a single trailing comma. + if s.endswith(','): + s = s[:-1] + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + +_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..4a5a30e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distlib/wheel.py @@ -0,0 +1,1099 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +from email import message_from_file +import hashlib +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+                   cached_property, get_cache_base, read_exports, tempdir,
+                   get_platform)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None  # created when needed
+
+if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
+    IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'):  # pragma: no cover
+    IMP_PREFIX = 'jy'
+elif sys.platform == 'cli':  # pragma: no cover
+    IMP_PREFIX = 'ip'
+else:
+    IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX:  # pragma: no cover
+    VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
+else:
+
+    def _derive_abi():
+        parts = ['cp', VER_SUFFIX]
+        if sysconfig.get_config_var('Py_DEBUG'):
+            parts.append('d')
+        if IMP_PREFIX == 'cp':
+            vi = sys.version_info[:2]
+            if vi < (3, 8):
+                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
+                if wpm is None:
+                    wpm = True
+                if wpm:
+                    parts.append('m')
+                if vi < (3, 3):
+                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
+                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
+                        parts.append('u')
+        return ''.join(parts)
+
+    ABI = _derive_abi()
+    del _derive_abi
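+
+# Editor's note: an illustrative example (not from the distlib sources) of the
+# wheel filename convention matched below. A name such as
+# 'foo-1.0-cp312-cp312-manylinux1_x86_64.whl' parses as nm='foo', vn='1.0',
+# py='cp312', bi='cp312', ar='manylinux1_x86_64', with the optional build
+# number (bn) absent.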
+
+FILENAME_RE = re.compile(
+    r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(
+    r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+    to_posix = lambda o: o
+else:
+    to_posix = lambda o: o.replace(os.sep, '/')
+
+if sys.version_info[0] < 3:
+    import imp
+else:
+    imp = None
+    import importlib.machinery
+    import importlib.util
+
+
+def _get_suffixes():
+    if imp:
+        return [s[0] for s in imp.get_suffixes()]
+    else:
+        return importlib.machinery.EXTENSION_SUFFIXES
+
+
+def _load_dynamic(name, path):
+    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
+    if imp:
+        return imp.load_dynamic(name, path)
+    else:
+        spec = importlib.util.spec_from_file_location(name, path)
+        module = importlib.util.module_from_spec(spec)
+        sys.modules[name] = module
+        spec.loader.exec_module(module)
+        return module
+
+
+class Mounter(object):
+
+    def __init__(self):
+        self.impure_wheels = {}
+        self.libs = {}
+
+    def add(self, pathname, extensions):
+        self.impure_wheels[pathname] = extensions
+        self.libs.update(extensions)
+
+    def remove(self, pathname):
+        extensions = self.impure_wheels.pop(pathname)
+        for k, v in extensions:
+            if k in self.libs:
+                del self.libs[k]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.libs:
+            result = self
+        else:
+            result = None
+        return result
+
+    def load_module(self, fullname):
+        if fullname in sys.modules:
+            result = sys.modules[fullname]
+        else:
+            if fullname not in self.libs:
+                raise ImportError('unable to find extension for %s' % fullname)
+            result = _load_dynamic(fullname, self.libs[fullname])
+            result.__loader__ = self
+            parts = fullname.rsplit('.', 1)
+            if len(parts) > 1:
+                result.__package__ = parts[0]
+        return result
+
+
+_hook = Mounter()
+
+
+class Wheel(object):
+    """
+    Class to build and install from Wheel files (PEP 427).
+    """
+
+    wheel_version = (1, 1)
+    hash_kind = 'sha256'
+
+    def __init__(self, filename=None, sign=False, verify=False):
+        """
+        Initialise an instance using a (valid) filename.
+        """
+        self.sign = sign
+        self.should_verify = verify
+        self.buildver = ''
+        self.pyver = [PYVER]
+        self.abi = ['none']
+        self.arch = ['any']
+        self.dirname = os.getcwd()
+        if filename is None:
+            self.name = 'dummy'
+            self.version = '0.1'
+            self._filename = self.filename
+        else:
+            m = NAME_VERSION_RE.match(filename)
+            if m:
+                info = m.groupdict('')
+                self.name = info['nm']
+                # Reinstate the local version separator
+                self.version = info['vn'].replace('_', '-')
+                self.buildver = info['bn']
+                self._filename = self.filename
+            else:
+                dirname, filename = os.path.split(filename)
+                m = FILENAME_RE.match(filename)
+                if not m:
+                    raise DistlibException('Invalid name or '
+                                           'filename: %r' % filename)
+                if dirname:
+                    self.dirname = os.path.abspath(dirname)
+                self._filename = filename
+                info = m.groupdict('')
+                self.name = info['nm']
+                self.version = info['vn']
+                self.buildver = info['bn']
+                self.pyver = info['py'].split('.')
+                self.abi = info['bi'].split('.')
+                self.arch = info['ar'].split('.')
+
+    @property
+    def filename(self):
+        """
+        Build and return a filename from the various components.
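+        For example, name 'foo', version '1.0', pyver ['py3'], abi ['none']
+        and arch ['any'] produce 'foo-1.0-py3-none-any.whl'.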
+ """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, + abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + self.get_wheel_metadata(zf) + # wv = wheel_metadata['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % + hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, archive_record_path): + records = list(records) # make a copy, as mutated + records.append((archive_record_path, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + # hasher = 
getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + ap = to_posix(os.path.join(info_dir, 'RECORD')) + self.write_record(records, p, ap) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. + path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. 
Use the number of path segments to keep directory entries together,
+        # and keep the dist-info stuff at the end.
+        def sorter(t):
+            ap = t[0]
+            n = ap.count('/')
+            if '.dist-info' in ap:
+                n += 10000
+            return (n, ap)
+
+        archive_paths = sorted(archive_paths, key=sorter)
+
+        # Now, at last, RECORD.
+        # Paths in here are archive paths - nothing else makes sense.
+        self.write_records((distinfo, info_dir), libdir, archive_paths)
+        # Now, ready to build the zip file
+        pathname = os.path.join(self.dirname, self.filename)
+        self.build_zip(pathname, archive_paths)
+        return pathname
+
+    def skip_entry(self, arcname):
+        """
+        Determine whether an archive entry should be skipped when verifying
+        or installing.
+        """
+        # The signature file won't be in RECORD,
+        # and we don't currently do anything with it.
+        # We also skip directories, as they won't be in RECORD
+        # either. See:
+        #
+        # https://github.com/pypa/wheel/issues/294
+        # https://github.com/pypa/wheel/issues/287
+        # https://github.com/pypa/wheel/pull/289
+        #
+        return arcname.endswith(('/', '/RECORD.jws'))
+
+    def install(self, paths, maker, **kwargs):
+        """
+        Install a wheel to the specified paths. If kwarg ``warner`` is
+        specified, it should be a callable, which will be called with two
+        tuples indicating the wheel version of this software and the wheel
+        version in the file, if there is a discrepancy in the versions.
+        This can be used to issue any warnings or to raise any exceptions.
+        If kwarg ``lib_only`` is True, only the purelib/platlib files are
+        installed, and the headers, scripts, data and dist-info metadata are
+        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+        bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 3.7+).
+
+        The return value is a :class:`InstalledDistribution` instance unless
+        ``lib_only`` is True, in which case the return value is ``None``.
+        """
+
+        dry_run = maker.dry_run
+        warner = kwargs.get('warner')
+        lib_only = kwargs.get('lib_only', False)
+        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation',
+                                            False)
+
+        pathname = os.path.join(self.dirname, self.filename)
+        name_ver = '%s-%s' % (self.name, self.version)
+        data_dir = '%s.data' % name_ver
+        info_dir = '%s.dist-info' % name_ver
+
+        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+        record_name = posixpath.join(info_dir, 'RECORD')
+
+        wrapper = codecs.getreader('utf-8')
+
+        with ZipFile(pathname, 'r') as zf:
+            with zf.open(wheel_metadata_name) as bwf:
+                wf = wrapper(bwf)
+                message = message_from_file(wf)
+            wv = message['Wheel-Version'].split('.', 1)
+            file_version = tuple([int(i) for i in wv])
+            if (file_version != self.wheel_version) and warner:
+                warner(self.wheel_version, file_version)
+
+            if message['Root-Is-Purelib'] == 'true':
+                libdir = paths['purelib']
+            else:
+                libdir = paths['platlib']
+
+            records = {}
+            with zf.open(record_name) as bf:
+                with CSVReader(stream=bf) as reader:
+                    for row in reader:
+                        p = row[0]
+                        records[p] = row
+
+            data_pfx = posixpath.join(data_dir, '')
+            info_pfx = posixpath.join(info_dir, '')
+            script_pfx = posixpath.join(data_dir, 'scripts', '')
+
+            # make a new instance rather than a copy of maker's,
+            # as we mutate it
+            fileop = FileOperator(dry_run=dry_run)
+            fileop.record = True  # so we can rollback if needed
+
+            bc = not sys.dont_write_bytecode  # Double negatives. Lovely!
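+
+            # Each row of RECORD (parsed into ``records`` above) has the form
+            # (archive_path, digest, size), with illustrative values only:
+            #
+            #   ('demo/__init__.py', 'sha256=AbC...xYz', '1432')
+            #
+            # The loop below checks each zip entry against its RECORD row.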
+ + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + # Issue #147: permission bits aren't preserved. Using + # zf.extract(zinfo, libdir) should have worked, but didn't, + # see https://www.thetopsites.net/article/53834422.shtml + # So ... manually preserve permission bits as given in zinfo + if os.name == 'posix': + # just set the normal permission bits + os.chmod(outfile, + (zinfo.external_attr >> 16) & 0x1FF) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile( + outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. 
+ commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True} + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp( + file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. 
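+
+        For example (illustrative; the result depends on the running
+        interpreter, and the wheel name is hypothetical):
+
+        >>> ok = Wheel('demo-1.0-py3-none-any.whl').is_compatible()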
+ """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' % pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + # data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message_from_file(wf) + # wv = message['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. 
If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. + """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], '.'.join( + str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug( + 'Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % + dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + + +def _get_glibc_version(): + import platform + ver = platform.libc_ver() + result = [] + if ver[0] == 'glibc': + for s in ver[1].split('.'): + result.append(int(s) if s.isdigit() else 0) + result = tuple(result) + return result + + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
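+
+    For example, on any CPython 3.x interpreter the pure-Python universal
+    tag is always among the results:
+
+    >>> ('py3', 'none', 'any') in compatible_tags()
+    True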
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, -1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix in _get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2014_%s' % arch)) + result.append( + (''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py new file mode 100644 index 0000000..7686fe8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + 
"distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py new file mode 100644 index 0000000..0c01d5b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6600f0b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..93ada7f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc new file mode 100644 index 0000000..8e4232f Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py new file mode 100644 index 0000000..89e1868 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py @@ -0,0 +1,1399 @@ +#!/usr/bin/env python +# Copyright 2015,2016,2017 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +`_ for more information. 
+""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) + +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict + +__version__ = "1.8.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be looked up for if _UNIXCONFDIR is not readable. +_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", +) + + +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. 
Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id() -> str: + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. + + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amzn" Amazon Linux + "arch" Arch Linux + "buildroot" Buildroot + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX + "guix" Guix System + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. 
The first available and non-empty value is used:
+
+    * the value of the "ID" attribute of the os-release file,
+
+    * the value of the "Distributor ID" attribute returned by the lsb_release
+      command,
+
+    * the first part of the file name of the distro release file,
+
+    The so determined ID value then passes the following transformations,
+    before it is returned by this method:
+
+    * it is translated to lower case,
+
+    * blanks (which should not be there anyway) are translated to underscores,
+
+    * a normalization of the ID is performed, based upon
+      `normalization tables`_. The purpose of this normalization is to ensure
+      that the ID is as reliable as possible, even across incompatible changes
+      in the OS distributions. A common reason for an incompatible change is
+      the addition of an os-release file, or the addition of the lsb_release
+      command, with ID values that differ from what was previously determined
+      from the distro release file name.
+    """
+    return _distro.id()
+
+
+def name(pretty: bool = False) -> str:
+    """
+    Return the name of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the name is returned without version or codename.
+    (e.g. "CentOS Linux")
+
+    If *pretty* is true, the version and codename are appended.
+    (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+    **Lookup hierarchy:**
+
+    The name is obtained from the following sources, in the specified order.
+    The first available and non-empty value is used:
+
+    * If *pretty* is false:
+
+      - the value of the "NAME" attribute of the os-release file,
+
+      - the value of the "Distributor ID" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file.
+
+    * If *pretty* is true:
+
+      - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+      - the value of the "Description" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file, appended
+        with the value of the pretty version ("<version_id>" and "<codename>"
+        fields) of the distro release file, if available.
+    """
+    return _distro.name(pretty)
+
+
+def version(pretty: bool = False, best: bool = False) -> str:
+    """
+    Return the version of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the version is returned without codename (e.g.
+    "7.0").
+
+    If *pretty* is true, the codename in parenthesis is appended, if the
+    codename is non-empty (e.g. "7.0 (Maipo)").
+
+    Some distributions provide version numbers with different precisions in
+    the different sources of distribution information. Examining the different
+    sources in a fixed priority order does not always yield the most precise
+    version (e.g. for Debian 8.2, or CentOS 7.1).
+
+    Some other distributions may not provide this kind of information. In these
+    cases, an empty string would be returned. This behavior can be observed
+    with rolling-release distributions (e.g. Arch Linux).
+
+    The *best* parameter can be used to control the approach for the returned
+    version:
+
+    If *best* is false, the first non-empty version number in priority order of
+    the examined sources is returned.
+
+    If *best* is true, the most precise version number out of all examined
+    sources is returned.
+
+    **Lookup hierarchy:**
+
+    In all cases, the version number is obtained from the following sources.
+    If *best* is false, this order represents the priority order:
+
+    * the value of the "VERSION_ID" attribute of the os-release file,
+    * the value of the "Release" attribute returned by the lsb_release
+      command,
+    * the version number parsed from the "<version_id>" field of the first line
+      of the distro release file,
+    * the version number parsed from the "PRETTY_NAME" attribute of the
+      os-release file, if it follows the format of the distro release files.
+    * the version number parsed from the "Description" attribute returned by
+      the lsb_release command, if it follows the format of the distro release
+      files.
+    """
+    return _distro.version(pretty, best)
+
+
+def version_parts(best: bool = False) -> Tuple[str, str, str]:
+    """
+    Return the version of the current OS distribution as a tuple
+    ``(major, minor, build_number)`` with items as follows:
+
+    * ``major``: The result of :func:`distro.major_version`.
+
+    * ``minor``: The result of :func:`distro.minor_version`.
+
+    * ``build_number``: The result of :func:`distro.build_number`.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.version_parts(best)
+
+
+def major_version(best: bool = False) -> str:
+    """
+    Return the major version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The major version is the first
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.major_version(best)
+
+
+def minor_version(best: bool = False) -> str:
+    """
+    Return the minor version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The minor version is the second
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.minor_version(best)
+
+
+def build_number(best: bool = False) -> str:
+    """
+    Return the build number of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The build number is the third part
+    of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.build_number(best)
+
+
+def like() -> str:
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution in regards to packaging
+    and programming interfaces, for example distributions the current
+    distribution is a derivative from.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename() -> str:
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "<codename>" field of the distro release file.
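+
+    For example (illustrative; the value depends on the host system):
+
+    >>> release_codename = codename()   # e.g. 'jammy' on Ubuntu 22.04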
+ """ + return _distro.codename() + + +def info(pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information items about the current OS + distribution in a dictionary, as shown in the following example: + + .. sourcecode:: python + + { + 'id': 'rhel', + 'version': '7.0', + 'version_parts': { + 'major': '7', + 'minor': '0', + 'build_number': '' + }, + 'like': 'fedora', + 'codename': 'Maipo' + } + + The dictionary structure and keys are always the same, regardless of which + information items are available in the underlying data sources. The values + for the various keys are as follows: + + * ``id``: The result of :func:`distro.id`. + + * ``version``: The result of :func:`distro.version`. + + * ``version_parts -> major``: The result of :func:`distro.major_version`. + + * ``version_parts -> minor``: The result of :func:`distro.minor_version`. + + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. + """ + return _distro.info(pretty, best) + + +def os_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current OS distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current OS distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_info() + + +def uname_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute: str) -> str: + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute: str) -> str: + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. 
+ """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property: # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f: Callable[[Any], Any]) -> None: + self._fname = f.__name__ + self._f = f + + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution: + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. + However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. 
+ + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. + + Raises: + + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. + + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. 
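+            # In other words: prefer the os-release file in ``etc_dir`` when it
+            # exists, and fall back to the one in ``usr_lib_dir`` only when the
+            # former is missing but the latter is present.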
+ if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) + ) + + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self._os_release_info.get("release_codename") or self.codename(), + ) + + def id(self) -> str: + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + + def normalize(distro_id: str, table: Dict[str, str]) -> str: + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty: bool = False) -> str: + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = f"{name} {version}" + return name or "" + + def version(self, pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. 
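+
+        For example (illustrative; the value depends on the host system):
+
+        >>> ver = LinuxDistribution().version(best=True)   # e.g. '22.04.3'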
+ """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = f"{version} ({self.codename()})" + return version + + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best: bool = False) -> str: + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best: bool = False) -> str: + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best: bool = False) -> str: + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self) -> str: + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self) -> str: + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. 
+ """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute: str) -> str: + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file, encoding="utf-8") as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. 
+ """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + + return props + + @cached_property + def _lsb_release_info(self) -> Dict[str, str]: + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + if not self.include_lsb: + return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. 
+ continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + + @staticmethod + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(bytestring: bytes) -> str: + encoding = sys.getfilesystemencoding() + return bytestring.decode(encoding) + + @cached_property + def _distro_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + else: + try: + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = _DISTRO_RELEASE_BASENAMES + for basename in basenames: + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. 
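+ # i.e. nothing under etc_dir matched the
+ # '<name>[-_](release|version)' basename pattern with a
+ # parsable name, so no distro release info is available.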
+ return {} + + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath, encoding="utf-8") as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except OSError: + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line: str) -> Dict[str, str]: + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main() -> None: + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py new file mode 100644 index 0000000..a40eeaf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + 
"check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a85665c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc new file mode 100644 index 0000000..5acef54 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..aa8f548 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..7dbf807 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc new file mode 100644 index 0000000..7a2d0e5 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc new file mode 100644 index 0000000..0775930 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc new file mode 100644 index 0000000..aafd4cd Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc new file mode 100644 index 0000000..6497d9a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py new file mode 100644 index 0000000..1ca9ba6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py @@ -0,0 +1,112 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Tuple, Optional + 
+_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return "", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_str = '.'.join(result) + trailing_dot # type: ignore + size += len(trailing_dot) + return result_str, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def getregentry() -> codecs.CodecInfo: + # Compatibility as a search_function for codecs.register() + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, # type: ignore + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py new file mode 100644 index 0000000..786e6bd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/core.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/core.py new file mode 100644 index 0000000..4f30037 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/core.py @@ -0,0 +1,400 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') 
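+ # (rejects, e.g., a label whose first codepoint is U+0300 COMBINING
+ # GRAVE ACCENT -- any leading character in Unicode category M*)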
+ return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode('ascii') + 
ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + return label_bytes + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = str(label) + check_label(label) + label_bytes = _punycode(label) + label_bytes = _alabel_prefix + label_bytes + + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') + + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] # type: ignore + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + return unicodedata.normalize('NFC', output) + + +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if isinstance(s, (bytes, bytearray)): + try: + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') + if uts46: + s = uts46_remap(s, std3_rules, 
False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split('.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py new file mode 100644 index 0000000..67db462 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py @@ -0,0 +1,2151 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '15.0.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b73a, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + 0x300000003134b, + 0x31350000323b0, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1550001b156, + 0x1b1640001b168, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 
0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 
0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 
0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e94b: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, 
+ 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 
0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3c00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc5d00000c5e, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcdd00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf4, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 
0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ecf, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x170000001716, + 0x171f00001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001acf, + 0x1b0000001b4d, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 
0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c60, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 
0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f20000a7f5, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab69, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010786, + 0x10787000107b1, + 0x107b2000107bb, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10efd00010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 
0x10fe000010ff7, + 0x1100000011047, + 0x1106600011076, + 0x1107f000110bb, + 0x110c2000110c3, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x1174000011747, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ab000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11f0000011f11, + 0x11f1200011f3b, + 0x11f3e00011f43, + 0x11f5000011f5a, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x12f9000012ff1, + 0x1300000013430, + 0x1344000013456, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1b1550001b156, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1df000001df1f, + 0x1df250001df2b, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e0300001e06e, + 0x1e08f0001e090, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e4d00001e4fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 0x1e9500001e95a, + 0x200000002a6e0, + 0x2a7000002b73a, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x300000003134b, + 0x31350000323b0, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + 
), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py new file mode 100644 index 0000000..6a43b04 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py @@ -0,0 +1,54 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ < end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py new file mode 100644 index 0000000..8501893 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '3.4' + diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py new file mode 100644 index 0000000..186796c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py @@ -0,0 +1,8600 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = '15.0.0' +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + 
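# Each row in these _seg_N tables is (codepoint, status) or (codepoint, status, mapping), + # sorted by codepoint; a row governs every codepoint from its own up to (but excluding) + # the next row's. Statuses follow UTS #46: 'V' valid, 'M' mapped (to the third element), + # 'D' deviation, 'I' ignored, 'X' disallowed, '3' disallowed under STD3 rules (and + # mapped when a third element is present). + 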
(0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + 
(0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + (0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), + (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), + (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, 'M', 'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 
'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), + (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + (0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] 
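A note on how these two vendored data files are meant to be read (an illustrative sketch, not the idna package's API): the hex constants in idnadata.py earlier in this diff encode each run of codepoints as a single integer (start << 32) | end, tested with the intranges helpers above, while the _seg_N functions here each return one slice of a flat, codepoint-sorted UTS #46 mapping table (generated for Unicode 15.0.0, per its __version__) that can be searched by bisection. In the sketch below, rows, Row, and lookup are hypothetical names introduced for illustration; idna's real remapping logic lives elsewhere in the package.

import bisect
from typing import List, Tuple, Union

# --- Run-encoded ranges, mirroring pip/_vendor/idna/intranges.py ---

def _encode_range(start: int, end: int) -> int:
    return (start << 32) | end

def _decode_range(r: int) -> Tuple[int, int]:
    return (r >> 32), (r & ((1 << 32) - 1))

# The CONTEXTJ entry 0x200c0000200e seen earlier decodes to the half-open
# run [0x200C, 0x200E): ZERO WIDTH NON-JOINER and ZERO WIDTH JOINER.
assert _decode_range(0x200C0000200E) == (0x200C, 0x200E)

def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
    # Probe with (int_, 0): it sorts before any real run starting at int_,
    # so the run containing int_ (if any) is at pos - 1 or exactly at pos.
    pos = bisect.bisect_left(ranges, _encode_range(int_, 0))
    if pos > 0:
        left, right = _decode_range(ranges[pos - 1])
        if left <= int_ < right:
            return True
    if pos < len(ranges):
        left, _ = _decode_range(ranges[pos])
        return left == int_
    return False

contextj = (0x200C0000200E,)
assert intranges_contain(0x200C, contextj)
assert not intranges_contain(0x200E, contextj)  # end of a run is exclusive

# --- Bisecting a (codepoint, status[, mapping]) table like the one above ---

Row = Union[Tuple[int, str], Tuple[int, str, str]]

# Toy three-row table; the real table has one row per change of status.
rows: List[Row] = [
    (0x40, '3'),       # '@': disallowed under STD3 rules
    (0x41, 'M', 'a'),  # 'A': mapped to 'a'
    (0x61, 'V'),       # 'a': valid
]

def lookup(code_point: int) -> Row:
    # 'Z' sorts after every status letter in use ('3', 'D', 'I', 'M', 'V',
    # 'X'), so bisect_left lands just past the row governing code_point.
    return rows[bisect.bisect_left(rows, (code_point, 'Z')) - 1]

assert lookup(0x41) == (0x41, 'M', 'a')
assert lookup(0x61) == (0x61, 'V')

Encoding runs as single integers keeps both data files as flat tuples of ints and strings: cheap to import, and already totally ordered, which is exactly what bisect needs.
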
+ +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + 
(0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), + (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + 
(0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61D, 'V'), + ] + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + ] + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + 
(0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + ] + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, 'X'), + (0xC3C, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF4, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECF, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + 
(0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 
'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 
'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 
'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 
'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 
'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + 
(0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + 
(0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), 
+ (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 
'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + 
(0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + 
(0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 
'キロ'), + (0x3315, 'M', 'キログラム'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + 
(0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + ] 
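
Aside (not part of the generated file): the `_seg_*` functions above and below are generated UTS #46 mapping data as vendored in pip's idna package. Each tuple appears to record the first codepoint of a range plus a status letter ('V' valid, 'M' mapped to the given replacement, 'X' disallowed, '3' disallowed under STD3 but mapped, 'I' ignored); a codepoint is governed by the last entry whose start is at or below it. Below is a minimal sketch of how such a range table can be queried under that assumed convention. The names `_demo_table` and `lookup` are hypothetical illustrations, not functions from this file, and the tiny table is hand-written stand-in data, not real UTS #46 rows.

import bisect
from typing import List, Tuple, Union

# Same element type as the generated _seg_* functions.
Entry = Union[Tuple[int, str], Tuple[int, str, str]]

def _demo_table() -> List[Entry]:
    # Tiny illustrative stand-in for the concatenated _seg_* output.
    # Each tuple is (range_start, status[, mapping]).
    return [
        (0x41, 'M', 'a'),  # 'A' maps to 'a'
        (0x42, 'M', 'b'),  # 'B' maps to 'b'
        (0x43, 'V'),       # valid as-is from here on (illustrative only)
    ]

def lookup(table: List[Entry], cp: int) -> Entry:
    # Return the range entry governing codepoint cp.
    # Assumes cp >= table[0][0]; the real generated table starts at 0x0,
    # so every codepoint falls inside some range.
    starts = [entry[0] for entry in table]
    return table[bisect.bisect_right(starts, cp) - 1]

if __name__ == '__main__':
    table = _demo_table()
    print(lookup(table, ord('B')))  # (66, 'M', 'b'): 'B' is Mapped to 'b'

Storing only range starts and binary-searching them keeps the table compact: one entry covers an entire run of identically-treated codepoints (for example, a single 'X' entry can disallow thousands of consecutive unassigned codepoints), which is why the entries above frequently skip values.
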
+ +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), 
+ (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + 
(0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', 
'獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), 
+ (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 
'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 
'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + 
(0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 
'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', 
'+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), 
+ (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + 
(0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 
'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 
'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10EFD, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x11242, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 
'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11F00, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + (0x11F5A, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], 
Tuple[int, str, str]]]: + return [ + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + 
(0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 
'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + 
(0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + 
(0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + 
(0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + 
(0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + 
(0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 'ж'), + (0x1E037, 'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + (0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + 
(0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + 
(0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + 
(0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DC, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F777, 'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA70, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA89, 'X'), + (0x1FA90, 'V'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), + (0x1FAC6, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), + (0x1FAE0, 'V'), + (0x1FAE9, 'X'), + (0x1FAF0, 'V'), + (0x1FAF9, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B73A, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), 
+ (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + 
(0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + 
(0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + ] + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + 
(0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py new file mode 100644 index 0000000..1300b86 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py @@ -0,0 +1,57 @@ +# coding: utf-8 +from .exceptions import * +from .ext import ExtType, Timestamp + +import os +import sys + + +version = (1, 0, 5) +__version__ = "1.0.5" + + +if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: + from .fallback import Packer, unpackb, Unpacker +else: + try: + from ._cmsgpack import Packer, unpackb, Unpacker + except ImportError: + from .fallback import Packer, unpackb, Unpacker + + +def pack(o, stream, **kwargs): + """ + Pack object `o` and write it to `stream` + + See :class:`Packer` for options. + """ + packer = Packer(**kwargs) + stream.write(packer.pack(o)) + + +def packb(o, **kwargs): + """ + Pack object `o` and return packed bytes + + See :class:`Packer` for options. + """ + return Packer(**kwargs).pack(o) + + +def unpack(stream, **kwargs): + """ + Unpack an object from `stream`. + + Raises `ExtraData` when `stream` contains extra bytes. + See :class:`Unpacker` for options. 
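+
+    A minimal usage sketch (assumes ``data.msgpack`` is a file containing
+    exactly one object written with :func:`pack`)::
+
+        with open("data.msgpack", "rb") as f:
+            obj = unpack(f)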
+ """ + data = stream.read() + return unpackb(data, **kwargs) + + +# alias for compatibility to simplejson/marshal/pickle. +load = unpack +loads = unpackb + +dump = pack +dumps = packb diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..bb32b81 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..4ab8ba0 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc new file mode 100644 index 0000000..92b6417 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc new file mode 100644 index 0000000..1097940 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py new file mode 100644 index 0000000..d6d2615 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py @@ -0,0 +1,48 @@ +class UnpackException(Exception): + """Base class for some exceptions raised while unpacking. + + NOTE: unpack may raise exception other than subclass of + UnpackException. If you want to catch all error, catch + Exception instead. + """ + + +class BufferFull(UnpackException): + pass + + +class OutOfData(UnpackException): + pass + + +class FormatError(ValueError, UnpackException): + """Invalid msgpack format""" + + +class StackError(ValueError, UnpackException): + """Too nested""" + + +# Deprecated. Use ValueError instead +UnpackValueError = ValueError + + +class ExtraData(UnpackValueError): + """ExtraData is raised when there is trailing data. + + This exception is raised while only one-shot (not streaming) + unpack. + """ + + def __init__(self, unpacked, extra): + self.unpacked = unpacked + self.extra = extra + + def __str__(self): + return "unpack(b) received extra data." + + +# Deprecated. Use Exception instead to catch all exception during packing. 
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py
new file mode 100644
index 0000000..23e0d6b
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py
@@ -0,0 +1,193 @@
+# coding: utf-8
+from collections import namedtuple
+import datetime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+    int_types = (int, long)
+    _utc = None
+else:
+    int_types = int
+    try:
+        _utc = datetime.timezone.utc
+    except AttributeError:
+        _utc = datetime.timezone(datetime.timedelta(0))
+
+
+class ExtType(namedtuple("ExtType", "code data")):
+    """ExtType represents the ext type in msgpack."""
+
+    def __new__(cls, code, data):
+        if not isinstance(code, int):
+            raise TypeError("code must be int")
+        if not isinstance(data, bytes):
+            raise TypeError("data must be bytes")
+        if not 0 <= code <= 127:
+            raise ValueError("code must be 0~127")
+        return super(ExtType, cls).__new__(cls, code, data)
+
+
+class Timestamp(object):
+    """Timestamp represents the Timestamp extension type in msgpack.
+
+    When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`.
+    When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are
+    used to pack and unpack `Timestamp`.
+
+    This class is immutable: do not override seconds and nanoseconds.
+    """
+
+    __slots__ = ["seconds", "nanoseconds"]
+
+    def __init__(self, seconds, nanoseconds=0):
+        """Initialize a Timestamp object.
+
+        :param int seconds:
+            Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
+            May be negative.
+
+        :param int nanoseconds:
+            Number of nanoseconds to add to `seconds` to get fractional time.
+            Maximum is 999_999_999. Default is 0.
+
+        Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
+        """
+        if not isinstance(seconds, int_types):
+            raise TypeError("seconds must be an integer")
+        if not isinstance(nanoseconds, int_types):
+            raise TypeError("nanoseconds must be an integer")
+        if not (0 <= nanoseconds < 10**9):
+            raise ValueError(
+                "nanoseconds must be a non-negative integer less than 1000000000."
+            )
+        self.seconds = seconds
+        self.nanoseconds = nanoseconds
+
+    def __repr__(self):
+        """String representation of Timestamp."""
+        return "Timestamp(seconds={0}, nanoseconds={1})".format(
+            self.seconds, self.nanoseconds
+        )
+
+    def __eq__(self, other):
+        """Check for equality with another Timestamp object"""
+        if type(other) is self.__class__:
+            return (
+                self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
+            )
+        return False
+
+    def __ne__(self, other):
+        """not-equals method (see :func:`__eq__()`)"""
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return hash((self.seconds, self.nanoseconds))
+
+    @staticmethod
+    def from_bytes(b):
+        """Unpack bytes into a `Timestamp` object.
+
+        Used for pure-Python msgpack unpacking.
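+
+        For example (an illustrative sketch), the 4-byte payload
+        ``b"\x00\x00\x00\x01"`` decodes as the timestamp 32 value
+        ``Timestamp(seconds=1, nanoseconds=0)``.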
+
+        :param b: Payload from msgpack ext message with code -1
+        :type b: bytes
+
+        :returns: Timestamp object unpacked from msgpack ext payload
+        :rtype: Timestamp
+        """
+        if len(b) == 4:
+            seconds = struct.unpack("!L", b)[0]
+            nanoseconds = 0
+        elif len(b) == 8:
+            data64 = struct.unpack("!Q", b)[0]
+            seconds = data64 & 0x00000003FFFFFFFF
+            nanoseconds = data64 >> 34
+        elif len(b) == 12:
+            nanoseconds, seconds = struct.unpack("!Iq", b)
+        else:
+            raise ValueError(
+                "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
+            )
+        return Timestamp(seconds, nanoseconds)
+
+    def to_bytes(self):
+        """Pack this Timestamp object into bytes.
+
+        Used for pure-Python msgpack packing.
+
+        :returns data: Payload for EXT message with code -1 (timestamp type)
+        :rtype: bytes
+        """
+        if (self.seconds >> 34) == 0:  # seconds is non-negative and fits in 34 bits
+            data64 = self.nanoseconds << 34 | self.seconds
+            if data64 & 0xFFFFFFFF00000000 == 0:
+                # nanoseconds is zero and seconds < 2**32, so timestamp 32
+                data = struct.pack("!L", data64)
+            else:
+                # timestamp 64
+                data = struct.pack("!Q", data64)
+        else:
+            # timestamp 96
+            data = struct.pack("!Iq", self.nanoseconds, self.seconds)
+        return data
+
+    @staticmethod
+    def from_unix(unix_sec):
+        """Create a Timestamp from a posix timestamp in seconds.
+
+        :param unix_sec: Posix timestamp in seconds.
+        :type unix_sec: int or float.
+        """
+        seconds = int(unix_sec // 1)
+        nanoseconds = int((unix_sec % 1) * 10**9)
+        return Timestamp(seconds, nanoseconds)
+
+    def to_unix(self):
+        """Get the timestamp as a floating-point value.
+
+        :returns: posix timestamp
+        :rtype: float
+        """
+        return self.seconds + self.nanoseconds / 1e9
+
+    @staticmethod
+    def from_unix_nano(unix_ns):
+        """Create a Timestamp from a posix timestamp in nanoseconds.
+
+        :param int unix_ns: Posix timestamp in nanoseconds.
+        :rtype: Timestamp
+        """
+        return Timestamp(*divmod(unix_ns, 10**9))
+
+    def to_unix_nano(self):
+        """Get the timestamp as a unixtime in nanoseconds.
+
+        :returns: posix timestamp in nanoseconds
+        :rtype: int
+        """
+        return self.seconds * 10**9 + self.nanoseconds
+
+    def to_datetime(self):
+        """Get the timestamp as a UTC datetime.
+
+        Python 2 is not supported.
+
+        :rtype: datetime.datetime
+        """
+        return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta(
+            seconds=self.to_unix()
+        )
+
+    @staticmethod
+    def from_datetime(dt):
+        """Create a Timestamp from a datetime with tzinfo.
+
+        Python 2 is not supported.
+
+        :rtype: Timestamp
+        """
+        return Timestamp.from_unix(dt.timestamp())
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py
new file mode 100644
index 0000000..e8cebc1
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py
@@ -0,0 +1,1010 @@
+"""Fallback pure Python implementation of msgpack"""
+from datetime import datetime as _DateTime
+import sys
+import struct
+
+
+PY2 = sys.version_info[0] == 2
+if PY2:
+    int_types = (int, long)
+
+    def dict_iteritems(d):
+        return d.iteritems()
+
+else:
+    int_types = int
+    unicode = str
+    xrange = range
+
+    def dict_iteritems(d):
+        return d.items()
+
+
+if sys.version_info < (3, 5):
+    # Ugly hack...
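+    # RecursionError did not exist before Python 3.5, so alias it to
+    # RuntimeError here and use the message check below to tell genuine
+    # recursion errors apart from other RuntimeErrors.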
+    RecursionError = RuntimeError
+
+    def _is_recursionerror(e):
+        return (
+            len(e.args) == 1
+            and isinstance(e.args[0], str)
+            and e.args[0].startswith("maximum recursion depth exceeded")
+        )
+
+else:
+
+    def _is_recursionerror(e):
+        return True
+
+
+if hasattr(sys, "pypy_version_info"):
+    # BytesIO is slow on PyPy, StringIO is faster. However: PyPy's own
+    # StringBuilder is fastest.
+    from __pypy__ import newlist_hint
+
+    try:
+        from __pypy__.builders import BytesBuilder as StringBuilder
+    except ImportError:
+        from __pypy__.builders import StringBuilder
+    USING_STRINGBUILDER = True
+
+    class StringIO(object):
+        def __init__(self, s=b""):
+            if s:
+                self.builder = StringBuilder(len(s))
+                self.builder.append(s)
+            else:
+                self.builder = StringBuilder()
+
+        def write(self, s):
+            if isinstance(s, memoryview):
+                s = s.tobytes()
+            elif isinstance(s, bytearray):
+                s = bytes(s)
+            self.builder.append(s)
+
+        def getvalue(self):
+            return self.builder.build()
+
+else:
+    USING_STRINGBUILDER = False
+    from io import BytesIO as StringIO
+
+    newlist_hint = lambda size: []
+
+
+from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError
+
+from .ext import ExtType, Timestamp
+
+
+EX_SKIP = 0
+EX_CONSTRUCT = 1
+EX_READ_ARRAY_HEADER = 2
+EX_READ_MAP_HEADER = 3
+
+TYPE_IMMEDIATE = 0
+TYPE_ARRAY = 1
+TYPE_MAP = 2
+TYPE_RAW = 3
+TYPE_BIN = 4
+TYPE_EXT = 5
+
+DEFAULT_RECURSE_LIMIT = 511
+
+
+def _check_type_strict(obj, t, type=type, tuple=tuple):
+    if type(t) is tuple:
+        return type(obj) in t
+    else:
+        return type(obj) is t
+
+
+def _get_data_from_buffer(obj):
+    view = memoryview(obj)
+    if view.itemsize != 1:
+        raise ValueError("cannot unpack from multi-byte object")
+    return view
+
+
+def unpackb(packed, **kwargs):
+    """
+    Unpack an object from `packed`.
+
+    Raises ``ExtraData`` when *packed* contains extra bytes.
+    Raises ``ValueError`` when *packed* is incomplete.
+    Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+    Other exceptions can be raised during unpacking.
+
+    See :class:`Unpacker` for options.
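+
+    A minimal sketch: ``unpackb(b"\x93\x01\x02\x03")`` (a fixarray of three
+    positive fixints) returns ``[1, 2, 3]``.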
+ """ + unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) + unpacker.feed(packed) + try: + ret = unpacker._unpack() + except OutOfData: + raise ValueError("Unpack failed: incomplete input") + except RecursionError as e: + if _is_recursionerror(e): + raise StackError + raise + if unpacker._got_extradata(): + raise ExtraData(ret, unpacker._get_extradata()) + return ret + + +if sys.version_info < (2, 7, 6): + + def _unpack_from(f, b, o=0): + """Explicit type cast for legacy struct.unpack_from""" + return struct.unpack_from(f, bytes(b), o) + +else: + _unpack_from = struct.unpack_from + +_NO_FORMAT_USED = "" +_MSGPACK_HEADERS = { + 0xC4: (1, _NO_FORMAT_USED, TYPE_BIN), + 0xC5: (2, ">H", TYPE_BIN), + 0xC6: (4, ">I", TYPE_BIN), + 0xC7: (2, "Bb", TYPE_EXT), + 0xC8: (3, ">Hb", TYPE_EXT), + 0xC9: (5, ">Ib", TYPE_EXT), + 0xCA: (4, ">f"), + 0xCB: (8, ">d"), + 0xCC: (1, _NO_FORMAT_USED), + 0xCD: (2, ">H"), + 0xCE: (4, ">I"), + 0xCF: (8, ">Q"), + 0xD0: (1, "b"), + 0xD1: (2, ">h"), + 0xD2: (4, ">i"), + 0xD3: (8, ">q"), + 0xD4: (1, "b1s", TYPE_EXT), + 0xD5: (2, "b2s", TYPE_EXT), + 0xD6: (4, "b4s", TYPE_EXT), + 0xD7: (8, "b8s", TYPE_EXT), + 0xD8: (16, "b16s", TYPE_EXT), + 0xD9: (1, _NO_FORMAT_USED, TYPE_RAW), + 0xDA: (2, ">H", TYPE_RAW), + 0xDB: (4, ">I", TYPE_RAW), + 0xDC: (2, ">H", TYPE_ARRAY), + 0xDD: (4, ">I", TYPE_ARRAY), + 0xDE: (2, ">H", TYPE_MAP), + 0xDF: (4, ">I", TYPE_MAP), +} + + +class Unpacker(object): + """Streaming unpacker. + + Arguments: + + :param file_like: + File-like object having `.read(n)` method. + If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. + + :param int read_size: + Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) + + :param bool use_list: + If true, unpack msgpack array to Python list. + Otherwise, unpack to Python tuple. (default: True) + + :param bool raw: + If true, unpack msgpack raw to Python bytes. + Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). + + :param int timestamp: + Control how timestamp type is unpacked: + + 0 - Timestamp + 1 - float (Seconds from the EPOCH) + 2 - int (Nanoseconds from the EPOCH) + 3 - datetime.datetime (UTC). Python 2 is not supported. + + :param bool strict_map_key: + If true (default), only str or bytes are accepted for map (dict) keys. + + :param callable object_hook: + When specified, it should be callable. + Unpacker calls it with a dict argument after unpacking msgpack map. + (See also simplejson) + + :param callable object_pairs_hook: + When specified, it should be callable. + Unpacker calls it with a list of key-value pairs after unpacking msgpack map. + (See also simplejson) + + :param str unicode_errors: + The error handler for decoding unicode. (default: 'strict') + This option should be used only when you have msgpack data which + contains invalid UTF-8 string. + + :param int max_buffer_size: + Limits size of data waiting unpacked. 0 means 2**32-1. + The default value is 100*1024*1024 (100MiB). + Raises `BufferFull` exception when it is insufficient. + You should set this parameter when unpacking data from untrusted source. + + :param int max_str_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of str. (default: max_buffer_size) + + :param int max_bin_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of bin. (default: max_buffer_size) + + :param int max_array_len: + Limits max length of array. + (default: max_buffer_size) + + :param int max_map_len: + Limits max length of map. 
+        (default: max_buffer_size//2)
+
+    :param int max_ext_len:
+        Deprecated, use *max_buffer_size* instead.
+        Limits max size of ext type. (default: max_buffer_size)
+
+    Example of streaming deserialize from file-like object::
+
+        unpacker = Unpacker(file_like)
+        for o in unpacker:
+            process(o)
+
+    Example of streaming deserialize from socket::
+
+        unpacker = Unpacker()
+        while True:
+            buf = sock.recv(1024**2)
+            if not buf:
+                break
+            unpacker.feed(buf)
+            for o in unpacker:
+                process(o)
+
+    Raises ``ExtraData`` when *packed* contains extra bytes.
+    Raises ``OutOfData`` when *packed* is incomplete.
+    Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+    Other exceptions can be raised during unpacking.
+    """
+
+    def __init__(
+        self,
+        file_like=None,
+        read_size=0,
+        use_list=True,
+        raw=False,
+        timestamp=0,
+        strict_map_key=True,
+        object_hook=None,
+        object_pairs_hook=None,
+        list_hook=None,
+        unicode_errors=None,
+        max_buffer_size=100 * 1024 * 1024,
+        ext_hook=ExtType,
+        max_str_len=-1,
+        max_bin_len=-1,
+        max_array_len=-1,
+        max_map_len=-1,
+        max_ext_len=-1,
+    ):
+        if unicode_errors is None:
+            unicode_errors = "strict"
+
+        if file_like is None:
+            self._feeding = True
+        else:
+            if not callable(file_like.read):
+                raise TypeError("`file_like.read` must be callable")
+            self.file_like = file_like
+            self._feeding = False
+
+        #: array of bytes fed.
+        self._buffer = bytearray()
+        #: Which position we are currently reading.
+        self._buff_i = 0
+
+        # When Unpacker is used as an iterable, between the calls to next(),
+        # the buffer is not "consumed" completely, for efficiency sake.
+        # Instead, it is done sloppily. To make sure we raise BufferFull at
+        # the correct moments, we have to keep track of how sloppy we were.
+        # Furthermore, when the buffer is incomplete (that is: in the case
+        # we raise an OutOfData) we need to rollback the buffer to the correct
+        # state, which _buf_checkpoint records.
+ self._buf_checkpoint = 0 + + if not max_buffer_size: + max_buffer_size = 2**31 - 1 + if max_str_len == -1: + max_str_len = max_buffer_size + if max_bin_len == -1: + max_bin_len = max_buffer_size + if max_array_len == -1: + max_array_len = max_buffer_size + if max_map_len == -1: + max_map_len = max_buffer_size // 2 + if max_ext_len == -1: + max_ext_len = max_buffer_size + + self._max_buffer_size = max_buffer_size + if read_size > self._max_buffer_size: + raise ValueError("read_size must be smaller than max_buffer_size") + self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) + self._raw = bool(raw) + self._strict_map_key = bool(strict_map_key) + self._unicode_errors = unicode_errors + self._use_list = use_list + if not (0 <= timestamp <= 3): + raise ValueError("timestamp must be 0..3") + self._timestamp = timestamp + self._list_hook = list_hook + self._object_hook = object_hook + self._object_pairs_hook = object_pairs_hook + self._ext_hook = ext_hook + self._max_str_len = max_str_len + self._max_bin_len = max_bin_len + self._max_array_len = max_array_len + self._max_map_len = max_map_len + self._max_ext_len = max_ext_len + self._stream_offset = 0 + + if list_hook is not None and not callable(list_hook): + raise TypeError("`list_hook` is not callable") + if object_hook is not None and not callable(object_hook): + raise TypeError("`object_hook` is not callable") + if object_pairs_hook is not None and not callable(object_pairs_hook): + raise TypeError("`object_pairs_hook` is not callable") + if object_hook is not None and object_pairs_hook is not None: + raise TypeError( + "object_pairs_hook and object_hook are mutually " "exclusive" + ) + if not callable(ext_hook): + raise TypeError("`ext_hook` is not callable") + + def feed(self, next_bytes): + assert self._feeding + view = _get_data_from_buffer(next_bytes) + if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: + raise BufferFull + + # Strip buffer before checkpoint before reading file. + if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython + self._buffer.extend(view) + + def _consume(self): + """Gets rid of the used parts of the buffer.""" + self._stream_offset += self._buff_i - self._buf_checkpoint + self._buf_checkpoint = self._buff_i + + def _got_extradata(self): + return self._buff_i < len(self._buffer) + + def _get_extradata(self): + return self._buffer[self._buff_i :] + + def read_bytes(self, n): + ret = self._read(n, raise_outofdata=False) + self._consume() + return ret + + def _read(self, n, raise_outofdata=True): + # (int) -> bytearray + self._reserve(n, raise_outofdata=raise_outofdata) + i = self._buff_i + ret = self._buffer[i : i + n] + self._buff_i = i + len(ret) + return ret + + def _reserve(self, n, raise_outofdata=True): + remain_bytes = len(self._buffer) - self._buff_i - n + + # Fast path: buffer has n bytes already + if remain_bytes >= 0: + return + + if self._feeding: + self._buff_i = self._buf_checkpoint + raise OutOfData + + # Strip buffer before checkpoint before reading file. 
+ if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Read from file + remain_bytes = -remain_bytes + if remain_bytes + len(self._buffer) > self._max_buffer_size: + raise BufferFull + while remain_bytes > 0: + to_read_bytes = max(self._read_size, remain_bytes) + read_data = self.file_like.read(to_read_bytes) + if not read_data: + break + assert isinstance(read_data, bytes) + self._buffer += read_data + remain_bytes -= len(read_data) + + if len(self._buffer) < n + self._buff_i and raise_outofdata: + self._buff_i = 0 # rollback + raise OutOfData + + def _read_header(self): + typ = TYPE_IMMEDIATE + n = 0 + obj = None + self._reserve(1) + b = self._buffer[self._buff_i] + self._buff_i += 1 + if b & 0b10000000 == 0: + obj = b + elif b & 0b11100000 == 0b11100000: + obj = -1 - (b ^ 0xFF) + elif b & 0b11100000 == 0b10100000: + n = b & 0b00011111 + typ = TYPE_RAW + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) + obj = self._read(n) + elif b & 0b11110000 == 0b10010000: + n = b & 0b00001111 + typ = TYPE_ARRAY + if n > self._max_array_len: + raise ValueError( + "%s exceeds max_array_len(%s)" % (n, self._max_array_len) + ) + elif b & 0b11110000 == 0b10000000: + n = b & 0b00001111 + typ = TYPE_MAP + if n > self._max_map_len: + raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) + elif b == 0xC0: + obj = None + elif b == 0xC2: + obj = False + elif b == 0xC3: + obj = True + elif 0xC4 <= b <= 0xC6: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + n = _unpack_from(fmt, self._buffer, self._buff_i)[0] + else: + n = self._buffer[self._buff_i] + self._buff_i += size + if n > self._max_bin_len: + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif 0xC7 <= b <= 0xC9: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + L, n = _unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if L > self._max_ext_len: + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif 0xCA <= b <= 0xD3: + size, fmt = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + obj = _unpack_from(fmt, self._buffer, self._buff_i)[0] + else: + obj = self._buffer[self._buff_i] + self._buff_i += size + elif 0xD4 <= b <= 0xD8: + size, fmt, typ = _MSGPACK_HEADERS[b] + if self._max_ext_len < size: + raise ValueError( + "%s exceeds max_ext_len(%s)" % (size, self._max_ext_len) + ) + self._reserve(size + 1) + n, obj = _unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + 1 + elif 0xD9 <= b <= 0xDB: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + else: + n = self._buffer[self._buff_i] + self._buff_i += size + if n > self._max_str_len: + raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) + obj = self._read(n) + elif 0xDC <= b <= 0xDD: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if n > self._max_array_len: + raise ValueError( + "%s exceeds max_array_len(%s)" % (n, self._max_array_len) + ) + elif 0xDE <= b <= 0xDF: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if n > self._max_map_len: + raise ValueError("%s exceeds 
max_map_len(%s)" % (n, self._max_map_len)) + else: + raise FormatError("Unknown header: 0x%x" % b) + return typ, n, obj + + def _unpack(self, execute=EX_CONSTRUCT): + typ, n, obj = self._read_header() + + if execute == EX_READ_ARRAY_HEADER: + if typ != TYPE_ARRAY: + raise ValueError("Expected array") + return n + if execute == EX_READ_MAP_HEADER: + if typ != TYPE_MAP: + raise ValueError("Expected map") + return n + # TODO should we eliminate the recursion? + if typ == TYPE_ARRAY: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call `list_hook` + self._unpack(EX_SKIP) + return + ret = newlist_hint(n) + for i in xrange(n): + ret.append(self._unpack(EX_CONSTRUCT)) + if self._list_hook is not None: + ret = self._list_hook(ret) + # TODO is the interaction between `list_hook` and `use_list` ok? + return ret if self._use_list else tuple(ret) + if typ == TYPE_MAP: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call hooks + self._unpack(EX_SKIP) + self._unpack(EX_SKIP) + return + if self._object_pairs_hook is not None: + ret = self._object_pairs_hook( + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) + for _ in xrange(n) + ) + else: + ret = {} + for _ in xrange(n): + key = self._unpack(EX_CONSTRUCT) + if self._strict_map_key and type(key) not in (unicode, bytes): + raise ValueError( + "%s is not allowed for map key" % str(type(key)) + ) + if not PY2 and type(key) is str: + key = sys.intern(key) + ret[key] = self._unpack(EX_CONSTRUCT) + if self._object_hook is not None: + ret = self._object_hook(ret) + return ret + if execute == EX_SKIP: + return + if typ == TYPE_RAW: + if self._raw: + obj = bytes(obj) + else: + obj = obj.decode("utf_8", self._unicode_errors) + return obj + if typ == TYPE_BIN: + return bytes(obj) + if typ == TYPE_EXT: + if n == -1: # timestamp + ts = Timestamp.from_bytes(bytes(obj)) + if self._timestamp == 1: + return ts.to_unix() + elif self._timestamp == 2: + return ts.to_unix_nano() + elif self._timestamp == 3: + return ts.to_datetime() + else: + return ts + else: + return self._ext_hook(n, bytes(obj)) + assert typ == TYPE_IMMEDIATE + return obj + + def __iter__(self): + return self + + def __next__(self): + try: + ret = self._unpack(EX_CONSTRUCT) + self._consume() + return ret + except OutOfData: + self._consume() + raise StopIteration + except RecursionError: + raise StackError + + next = __next__ + + def skip(self): + self._unpack(EX_SKIP) + self._consume() + + def unpack(self): + try: + ret = self._unpack(EX_CONSTRUCT) + except RecursionError: + raise StackError + self._consume() + return ret + + def read_array_header(self): + ret = self._unpack(EX_READ_ARRAY_HEADER) + self._consume() + return ret + + def read_map_header(self): + ret = self._unpack(EX_READ_MAP_HEADER) + self._consume() + return ret + + def tell(self): + return self._stream_offset + + +class Packer(object): + """ + MessagePack Packer + + Usage:: + + packer = Packer() + astream.write(packer.pack(a)) + astream.write(packer.pack(b)) + + Packer's constructor has some keyword arguments: + + :param callable default: + Convert user type to builtin type that Packer supports. + See also simplejson's document. + + :param bool use_single_float: + Use single precision float type for float. (default: False) + + :param bool autoreset: + Reset buffer after each pack and return its content as `bytes`. (default: True). + If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. 
+
+    :param bool use_bin_type:
+        Use bin type introduced in msgpack spec 2.0 for bytes.
+        It also enables str8 type for unicode. (default: True)
+
+    :param bool strict_types:
+        If set to true, types will be checked to be exact. Derived classes
+        from serializable types will not be serialized and will be
+        treated as unsupported type and forwarded to default.
+        Additionally tuples will not be serialized as lists.
+        This is useful when trying to implement accurate serialization
+        for python types.
+
+    :param bool datetime:
+        If set to true, datetime with tzinfo is packed into Timestamp type.
+        Note that the tzinfo is stripped in the timestamp.
+        You can get UTC datetime with the `timestamp=3` option of the Unpacker.
+        (Python 2 is not supported).
+
+    :param str unicode_errors:
+        The error handler for encoding unicode. (default: 'strict')
+        DO NOT USE THIS!! This option is kept for very specific usage.
+
+    Raises ``TypeError`` when an object cannot be serialized, and
+    ``OverflowError`` when an integer does not fit any msgpack integer type.
+    Other exceptions can be raised during packing.
+    """
+
+    def __init__(
+        self,
+        default=None,
+        use_single_float=False,
+        autoreset=True,
+        use_bin_type=True,
+        strict_types=False,
+        datetime=False,
+        unicode_errors=None,
+    ):
+        self._strict_types = strict_types
+        self._use_float = use_single_float
+        self._autoreset = autoreset
+        self._use_bin_type = use_bin_type
+        self._buffer = StringIO()
+        if PY2 and datetime:
+            raise ValueError("datetime is not supported in Python 2")
+        self._datetime = bool(datetime)
+        self._unicode_errors = unicode_errors or "strict"
+        if default is not None:
+            if not callable(default):
+                raise TypeError("default must be callable")
+        self._default = default
+
+    def _pack(
+        self,
+        obj,
+        nest_limit=DEFAULT_RECURSE_LIMIT,
+        check=isinstance,
+        check_type_strict=_check_type_strict,
+    ):
+        default_used = False
+        if self._strict_types:
+            check = check_type_strict
+            list_types = list
+        else:
+            list_types = (list, tuple)
+        while True:
+            if nest_limit < 0:
+                raise ValueError("recursion limit exceeded")
+            if obj is None:
+                return self._buffer.write(b"\xc0")
+            if check(obj, bool):
+                if obj:
+                    return self._buffer.write(b"\xc3")
+                return self._buffer.write(b"\xc2")
+            if check(obj, int_types):
+                if 0 <= obj < 0x80:
+                    return self._buffer.write(struct.pack("B", obj))
+                if -0x20 <= obj < 0:
+                    return self._buffer.write(struct.pack("b", obj))
+                if 0x80 <= obj <= 0xFF:
+                    return self._buffer.write(struct.pack("BB", 0xCC, obj))
+                if -0x80 <= obj < 0:
+                    return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
+                if 0xFF < obj <= 0xFFFF:
+                    return self._buffer.write(struct.pack(">BH", 0xCD, obj))
+                if -0x8000 <= obj < -0x80:
+                    return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
+                if 0xFFFF < obj <= 0xFFFFFFFF:
+                    return self._buffer.write(struct.pack(">BI", 0xCE, obj))
+                if -0x80000000 <= obj < -0x8000:
+                    return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
+                if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
+                    return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
+                if -0x8000000000000000 <= obj < -0x80000000:
+                    return
self._buffer.write(struct.pack(">Bq", 0xD3, obj)) + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = True + continue + raise OverflowError("Integer value out of range") + if check(obj, (bytes, bytearray)): + n = len(obj) + if n >= 2**32: + raise ValueError("%s is too large" % type(obj).__name__) + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, unicode): + obj = obj.encode("utf-8", self._unicode_errors) + n = len(obj) + if n >= 2**32: + raise ValueError("String is too large") + self._pack_raw_header(n) + return self._buffer.write(obj) + if check(obj, memoryview): + n = obj.nbytes + if n >= 2**32: + raise ValueError("Memoryview is too large") + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, float): + if self._use_float: + return self._buffer.write(struct.pack(">Bf", 0xCA, obj)) + return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) + if check(obj, (ExtType, Timestamp)): + if check(obj, Timestamp): + code = -1 + data = obj.to_bytes() + else: + code = obj.code + data = obj.data + assert isinstance(code, int) + assert isinstance(data, bytes) + L = len(data) + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xC7, L)) + elif L <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xC8, L)) + else: + self._buffer.write(struct.pack(">BI", 0xC9, L)) + self._buffer.write(struct.pack("b", code)) + self._buffer.write(data) + return + if check(obj, list_types): + n = len(obj) + self._pack_array_header(n) + for i in xrange(n): + self._pack(obj[i], nest_limit - 1) + return + if check(obj, dict): + return self._pack_map_pairs( + len(obj), dict_iteritems(obj), nest_limit - 1 + ) + + if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None: + obj = Timestamp.from_datetime(obj) + default_used = 1 + continue + + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = 1 + continue + + if self._datetime and check(obj, _DateTime): + raise ValueError("Cannot serialize %r where tzinfo=None" % (obj,)) + + raise TypeError("Cannot serialize %r" % (obj,)) + + def pack(self, obj): + try: + self._pack(obj) + except: + self._buffer = StringIO() # force reset + raise + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_map_pairs(self, pairs): + self._pack_map_pairs(len(pairs), pairs) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_array_header(self, n): + if n >= 2**32: + raise ValueError + self._pack_array_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_map_header(self, n): + if n >= 2**32: + raise ValueError + self._pack_map_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = StringIO() + return ret + + def pack_ext_type(self, typecode, data): + if not isinstance(typecode, int): + raise TypeError("typecode must have int type.") + if not 0 <= typecode <= 127: + raise ValueError("typecode should be 0-127") + if not isinstance(data, bytes): + raise TypeError("data must have bytes type") + L = len(data) + if L > 0xFFFFFFFF: + raise ValueError("Too large data") + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + 
self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(b"\xc7" + struct.pack("B", L)) + elif L <= 0xFFFF: + self._buffer.write(b"\xc8" + struct.pack(">H", L)) + else: + self._buffer.write(b"\xc9" + struct.pack(">I", L)) + self._buffer.write(struct.pack("B", typecode)) + self._buffer.write(data) + + def _pack_array_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x90 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDC, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDD, n)) + raise ValueError("Array is too large") + + def _pack_map_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x80 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDE, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDF, n)) + raise ValueError("Dict is too large") + + def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): + self._pack_map_header(n) + for (k, v) in pairs: + self._pack(k, nest_limit - 1) + self._pack(v, nest_limit - 1) + + def _pack_raw_header(self, n): + if n <= 0x1F: + self._buffer.write(struct.pack("B", 0xA0 + n)) + elif self._use_bin_type and n <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xD9, n)) + elif n <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xDA, n)) + elif n <= 0xFFFFFFFF: + self._buffer.write(struct.pack(">BI", 0xDB, n)) + else: + raise ValueError("Raw is too large") + + def _pack_bin_header(self, n): + if not self._use_bin_type: + return self._pack_raw_header(n) + elif n <= 0xFF: + return self._buffer.write(struct.pack(">BB", 0xC4, n)) + elif n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xC5, n)) + elif n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xC6, n)) + else: + raise ValueError("Bin is too large") + + def bytes(self): + """Return internal buffer contents as bytes object""" + return self._buffer.getvalue() + + def reset(self): + """Reset internal buffer. + + This method is useful only when autoreset=False. + """ + self._buffer = StringIO() + + def getbuffer(self): + """Return view of internal buffer.""" + if USING_STRINGBUILDER or PY2: + return memoryview(self.bytes()) + else: + return self._buffer.getbuffer() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py new file mode 100644 index 0000000..3551bc2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__about__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "21.3" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014-2019 %s" % __author__ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py new file mode 100644 index 0000000..3c50c5d --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py @@ -0,0 +1,25 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc new file mode 100644 index 0000000..f243421 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f126e70 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc new file mode 100644 index 0000000..9c3f143 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc new file mode 100644 index 0000000..89dd52a Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc new file mode 100644 index 0000000..547e3a2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc new file mode 100644 index 0000000..f056154 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc differ diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc new file mode 100644 index 0000000..7c796e6 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc new file mode 100644 index 0000000..422cf8c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc new file mode 100644 index 0000000..f71f3a1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..eb054b2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..93dd5bf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py new file mode 100644 index 0000000..4c379aa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py @@ -0,0 +1,301 @@ +import collections +import functools +import os +import re +import struct +import sys +import warnings +from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader: + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. + """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file: IO[bytes]) -> None: + def unpack(fmt: str) -> int: + try: + data = file.read(struct.calcsize(fmt)) + result: Tuple[int, ...] 
= struct.unpack(fmt, data) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result[0] + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "H" + format_i = "I" + format_q = "Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header() -> Optional[_ELFFileHeader]: + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf() -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686() -> bool: + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_abi(arch: str) -> bool: + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. 
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.split() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> Optional[str]: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> Optional[str]: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> Tuple[int, int]: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return -1, -1 + return int(m.group("major")), int(m.group("minor")) + + +@functools.lru_cache() +def _get_glibc_version() -> Tuple[int, int]: + version_str = _glibc_version_string() + if version_str is None: + return (-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module. 
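# A small illustration of _parse_glibc_version's tolerance for vendor
# suffixes; the named groups "major"/"minor" match the m.group(...) calls
# above, and Linaro-style strings like "2.20-2014.11" parse to (2, 20).
import re

m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", "2.20-2014.11")
print((int(m.group("major")), int(m.group("minor"))))  # (2, 20)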
+ try: + import _manylinux # noqa + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(linux: str, arch: str) -> Iterator[str]: + if not _have_compatible_abi(arch): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if arch in {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(tag, arch, glibc_version): + yield linux.replace("linux", tag) + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(legacy_tag, arch, glibc_version): + yield linux.replace("linux", legacy_tag) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py new file mode 100644 index 0000000..8ac3059 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py @@ -0,0 +1,136 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. +""" + +import contextlib +import functools +import operator +import os +import re +import struct +import subprocess +import sys +from typing import IO, Iterator, NamedTuple, Optional, Tuple + + +def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: + return struct.unpack(fmt, f.read(struct.calcsize(fmt))) + + +def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: + """Detect musl libc location by parsing the Python executable. 
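# The ordering produced by the manylinux platform_tags above can be observed
# directly; a sketch assuming a glibc-based Linux host and pip's vendored
# import path (the path this diff adds the module under).
from pip._vendor.packaging import _manylinux

tags = list(_manylinux.platform_tags("linux_x86_64", "x86_64"))
print(tags[:2])  # newest first, e.g. ['manylinux_2_31_x86_64', 'manylinux_2_30_x86_64']
print("manylinux2014_x86_64" in tags)  # legacy aliases appear alongside glibc 2.17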
+ + Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca + ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html + """ + f.seek(0) + try: + ident = _read_unpacked(f, "16B") + except struct.error: + return None + if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. + return None + f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, p_fmt, p_idx = { + 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. + 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. + }[ident[4]] + except KeyError: + return None + else: + p_get = operator.itemgetter(*p_idx) + + # Find the interpreter section and return its content. + try: + _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) + except struct.error: + return None + for i in range(e_phnum + 1): + f.seek(e_phoff + e_phentsize * i) + try: + p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) + except struct.error: + return None + if p_type != 3: # Not PT_INTERP. + continue + f.seek(p_offset) + interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") + if "musl" not in interpreter: + return None + return interpreter + return None + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache() +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + with contextlib.ExitStack() as stack: + try: + f = stack.enter_context(open(executable, "rb")) + except OSError: + return None + ld = _parse_ld_musl_from_elf(f) + if not ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(arch: str) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param arch: Should be the part of platform tag after the ``linux_`` + prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a + prerequisite for the current platform to be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. 
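# _get_musl_version can be probed against the running interpreter; a sketch
# assuming pip's vendored import path. On glibc systems it returns None, on
# musl (e.g. Alpine) something like _MuslVersion(major=1, minor=2).
import sys
from pip._vendor.packaging import _musllinux

print(_musllinux._get_musl_version(sys.executable))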
+ return + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py new file mode 100644 index 0000000..90a6465 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py new file mode 100644 index 0000000..540e7a4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py @@ -0,0 +1,304 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import operator +import os +import platform +import sys +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from pip._vendor.pyparsing import ( # noqa: N817 + Forward, + Group, + Literal as L, + ParseException, + ParseResults, + QuotedString, + ZeroOrMore, + stringEnd, + stringStart, +) + +from .specifiers import InvalidSpecifier, Specifier + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + +Operator = Callable[[str, str], bool] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node: + def __init__(self, value: Any) -> None: + self.value = value + + def __str__(self) -> str: + return str(self.value) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +VARIABLE = ( + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") # PEP-345 + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 +) +ALIASES = { + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker( + marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True +) -> str: + + assert isinstance(marker, (list, tuple, str)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: Dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper: Optional[Operator] = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +class Undefined: + pass + + +_undefined = Undefined() + + +def _get_env(environment: Dict[str, str], name: str) -> str: + value: Union[str, Undefined] = environment.get(name, _undefined) + + if isinstance(value, Undefined): + raise UndefinedEnvironmentName( + f"{name!r} does not exist in evaluation environment." + ) + + return value + + +def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: + groups: List[List[bool]] = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, str)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info: "sys._version_info") -> str: + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment() -> Dict[str, str]: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker: + def __init__(self, marker: str) -> None: + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + raise InvalidMarker( + f"Invalid marker: {marker!r}, parse error at " + f"{marker[e.loc : e.loc + 8]!r}" + ) + + def __str__(self) -> str: + return _format_marker(self._markers) + + def __repr__(self) -> str: + return f"" + + def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. 
environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py new file mode 100644 index 0000000..1eab7dd --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py @@ -0,0 +1,146 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import re +import string +import urllib.parse +from typing import List, Optional as TOptional, Set + +from pip._vendor.pyparsing import ( # noqa + Combine, + Literal as L, + Optional, + ParseException, + Regex, + Word, + ZeroOrMore, + originalTextFor, + stringEnd, + stringStart, +) + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? 
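# Parsing a PEP 508 requirement string with this class; a sketch assuming
# pip's vendored import path.
from pip._vendor.packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "2.7"')
print(req.name, sorted(req.extras))  # requests ['security']
print(str(req.specifier))            # ==2.8.*,>=2.8.1
print(str(req.marker))               # python_version < "2.7"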
Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string: str) -> None: + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' + ) + + self.name: str = req.name + if req.url: + parsed_url = urllib.parse.urlparse(req.url) + if parsed_url.scheme == "file": + if urllib.parse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): + raise InvalidRequirement(f"Invalid URL: {req.url}") + self.url: TOptional[str] = req.url + else: + self.url = None + self.extras: Set[str] = set(req.extras.asList() if req.extras else []) + self.specifier: SpecifierSet = SpecifierSet(req.specifier) + self.marker: TOptional[Marker] = req.marker if req.marker else None + + def __str__(self) -> str: + parts: List[str] = [self.name] + + if self.extras: + formatted_extras = ",".join(sorted(self.extras)) + parts.append(f"[{formatted_extras}]") + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append(f"@ {self.url}") + if self.marker: + parts.append(" ") + + if self.marker: + parts.append(f"; {self.marker}") + + return "".join(parts) + + def __repr__(self) -> str: + return f"" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..0e218a6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py @@ -0,0 +1,802 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc +import functools +import itertools +import re +import warnings +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Pattern, + Set, + Tuple, + TypeVar, + Union, +) + +from .utils import canonicalize_version +from .version import LegacyVersion, Version, parse + +ParsedVersion = Union[Version, LegacyVersion] +UnparsedVersion = Union[Version, LegacyVersion, str] +VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) +CallableOperator = Callable[[ParsedVersion, str], bool] + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __str__(self) -> str: + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractproperty + def prereleases(self) -> Optional[bool]: + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. 
+ """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators: Dict[str, str] = {} + _regex: Pattern[str] + + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: '{spec}'") + + self._spec: Tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self) -> str: + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> Tuple[str, str]: + return self._spec[0], canonicalize_version(self._spec[1]) + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self) -> str: + return self._spec[0] + + @property + def version(self) -> str: + return self._spec[1] + + @property + def prereleases(self) -> Optional[bool]: + return self._prereleases + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + def __contains__(self, item: str) -> bool: + return self.contains(item) + + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: + + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + normalized_item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. 
+ operator_callable: CallableOperator = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter( + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = r""" + (?P(==|!=|<=|>=|<|>)) + \s* + (?P + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + super().__init__(spec, prereleases) + + warnings.warn( + "Creating a LegacyVersion has been deprecated and will be " + "removed in the next major release", + DeprecationWarning, + ) + + def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal( + self, prospective: LegacyVersion, spec: str + ) -> bool: + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: + return prospective > self._coerce_version(spec) + + +def _require_version_compare( + fn: Callable[["Specifier", ParsedVersion, str], bool] +) -> Callable[["Specifier", ParsedVersion, str], bool]: + @functools.wraps(fn) + def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: + if not isinstance(prospective, Version): + return False + return 
fn(self, prospective, spec) + + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: + + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = ".".join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + @_require_version_compare + def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: + + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. 
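# The ~= expansion described above (a >= check plus a == prefix match),
# observed from the outside; a sketch assuming pip's vendored import path.
from pip._vendor.packaging.specifiers import Specifier

spec = Specifier("~=2.2")       # behaves like >=2.2,==2.*
print(spec.contains("2.9"))     # True
print(spec.contains("2.1"))     # False (fails >=2.2)
print(spec.contains("3.0"))     # False (fails ==2.*)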
+ split_spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + split_prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = split_prospective[: len(split_spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + @_require_version_compare + def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal( + self, prospective: ParsedVersion, spec: str + ) -> bool: + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. 
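# Prefix matching with a trailing .* as implemented above; a sketch assuming
# pip's vendored import path. Local segments are dropped first and the
# dot-split components are zero-padded before comparison.
from pip._vendor.packaging.specifiers import Specifier

spec = Specifier("==2.8.*")
print(spec.contains("2.8.1"))        # True
print(spec.contains("2.8.1+local"))  # True: local segment ignored
print(spec.contains("2.9.0"))        # False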
+ if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self) -> bool: + + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> List[str]: + result: List[str] = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): + def __init__( + self, specifiers: str = "", prereleases: Optional[bool] = None + ) -> None: + + # Split on , to break each individual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. 
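# The post-release and local-version guards of _compare_greater_than above,
# in action; a sketch assuming pip's vendored import path.
from pip._vendor.packaging.specifiers import Specifier

print(Specifier(">3.1").contains("3.1.post1"))  # False: post of the same base
print(Specifier(">3.1").contains("3.2.post0"))  # True
print(Specifier(">3.1").contains("3.1+local"))  # False: local of the same base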
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed: Set[_IndividualSpecifier] = set() + for specifier in split_specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self) -> str: + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"" + + def __str__(self) -> str: + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + if isinstance(other, (str, _IndividualSpecifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + return len(self._specs) + + def __iter__(self) -> Iterator[_IndividualSpecifier]: + return iter(self._specs) + + @property + def prereleases(self) -> Optional[bool]: + + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + def __contains__(self, item: UnparsedVersion) -> bool: + return self.contains(item) + + def contains( + self, item: UnparsedVersion, prereleases: Optional[bool] = None + ) -> bool: + + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. 
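# Combining sets with & as implemented above merges the underlying
# specifiers; a sketch assuming pip's vendored import path.
from pip._vendor.packaging.specifiers import SpecifierSet

combined = SpecifierSet(">=1.0") & SpecifierSet("<2.0,!=1.5")
print(str(combined))             # !=1.5,<2.0,>=1.0
print(combined.contains("1.4"))  # True
print("1.5" in combined)         # False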
If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None + ) -> Iterable[VersionTypeVar]: + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered: List[VersionTypeVar] = [] + found_prereleases: List[VersionTypeVar] = [] + + item: UnparsedVersion + parsed_version: Union[Version, LegacyVersion] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py new file mode 100644 index 0000000..9a3d25a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py @@ -0,0 +1,487 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import logging +import platform +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { + "python": "py", # Generic. 
+ "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ + + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> FrozenSet[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version: PythonVersion) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}") + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version: Optional[PythonVersion] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> Iterator[str]: + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter: Optional[str] = None, + abis: Optional[Iterable[str]] = None, + platforms: Optional[Iterable[str]] = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. 
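# The priority order produced by cpython_tags above, with inputs pinned so
# the output is deterministic; a sketch assuming pip's vendored import path.
from pip._vendor.packaging.tags import cpython_tags

tags = list(cpython_tags((3, 12), abis=["cp312"], platforms=["any"]))
print([str(t) for t in tags[:3]])
# ['cp312-cp312-any', 'cp312-abi3-any', 'cp312-none-any'], then abi3
# backfill for older minors (cp311-abi3-any, ...) follows.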
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number. The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number. The minor versions are now the midyear updates.
+ for major_version in range(version[0], 10, -1): + compat_version = major_version, 0 + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=major_version, minor=0, binary_format=binary_format + ) + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + else: + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_format = "universal2" + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" + _, arch = linux.split("_", 1) + yield from _manylinux.platform_tags(linux, arch) + yield from _musllinux.platform_tags(arch) + yield linux + + +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter. + """ + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + + interp_name = interpreter_name() + if interp_name == "cp": + yield from cpython_tags(warn=warn) + else: + yield from generic_tags() + + if interp_name == "pp": + yield from compatible_tags(interpreter="pp3") + else: + yield from compatible_tags() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py new file mode 100644 index 0000000..bab11b8 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py @@ -0,0 +1,136 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +import re +from typing import FrozenSet, NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +_canonicalize_regex = re.compile(r"[-_.]+") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str) -> NormalizedName: + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast(NormalizedName, value) + + +def canonicalize_version(version: Union[Version, str]) -> str: + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. + """ + if isinstance(version, str): + try: + parsed = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + else: + parsed = version + + parts = [] + + # Epoch + if parsed.epoch != 0: + parts.append(f"{parsed.epoch}!") + + # Release segment + # NB: This strips trailing '.0's to normalize + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) + + # Pre-release + if parsed.pre is not None: + parts.append("".join(str(x) for x in parsed.pre)) + + # Post-release + if parsed.post is not None: + parts.append(f".post{parsed.post}") + + # Development release + if parsed.dev is not None: + parts.append(f".dev{parsed.dev}") + + # Local version segment + if parsed.local is not None: + parts.append(f"+{parsed.local}") + + return "".join(parts) + + +def parse_wheel_filename( + filename: str, +) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name + if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename}") + name = canonicalize_name(name_part) + version = Version(parts[1]) + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in '{filename}'" + ) + build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. 
+ name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") + + name = canonicalize_name(name_part) + version = Version(version_part) + return (name, version) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py new file mode 100644 index 0000000..de9a09a --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py @@ -0,0 +1,504 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import collections +import itertools +import re +import warnings +from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType + +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] + +InfiniteTypes = Union[InfinityType, NegativeInfinityType] +PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] +SubLocalType = Union[InfiniteTypes, int, str] +LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], +] +CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType +] +LegacyCmpKey = Tuple[int, Tuple[str, ...]] +VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool +] + +_Version = collections.namedtuple( + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version: str) -> Union["LegacyVersion", "Version"]: + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion: + _key: Union[CmpKey, LegacyCmpKey] + + def __hash__(self) -> int: + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. 
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+class LegacyVersion(_BaseVersion):
+    def __init__(self, version: str) -> None:
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+        warnings.warn(
+            "Creating a LegacyVersion has been deprecated and will be "
+            "removed in the next major release",
+            DeprecationWarning,
+        )
+
+    def __str__(self) -> str:
+        return self._version
+
+    def __repr__(self) -> str:
+        return f"<LegacyVersion('{self}')>"
+
+    @property
+    def public(self) -> str:
+        return self._version
+
+    @property
+    def base_version(self) -> str:
+        return self._version
+
+    @property
+    def epoch(self) -> int:
+        return -1
+
+    @property
+    def release(self) -> None:
+        return None
+
+    @property
+    def pre(self) -> None:
+        return None
+
+    @property
+    def post(self) -> None:
+        return None
+
+    @property
+    def dev(self) -> None:
+        return None
+
+    @property
+    def local(self) -> None:
+        return None
+
+    @property
+    def is_prerelease(self) -> bool:
+        return False
+
+    @property
+    def is_postrelease(self) -> bool:
+        return False
+
+    @property
+    def is_devrelease(self) -> bool:
+        return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+    "pre": "c",
+    "preview": "c",
+    "-": "final-",
+    "rc": "c",
+    "dev": "@",
+}
+
+
+def _parse_version_parts(s: str) -> Iterator[str]:
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+    # greater than or equal to 0. This effectively sorts every LegacyVersion,
+    # which uses the de facto standard originally implemented by setuptools,
+    # before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version in setuptools, prior
+    # to its adoption of the packaging library.
+    parts: List[str] = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+
+    return epoch, tuple(parts)
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
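+# Illustrative sketch (editorial, not part of the vendored module): the pattern
+# is meant to be compiled with re.VERBOSE | re.IGNORECASE and anchored by the
+# caller, exactly as the Version class below does:
+#
+#   _check = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+#   m = _check.match("1.0rc2.post3+local.7")
+#   assert m and m.group("release") == "1.0" and m.group("pre_l") == "rc"
+#   assert m.group("post_n2") == "3" and m.group("local") == "local.7"
+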
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    def __init__(self, version: str) -> None:
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __str__(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
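+    # For illustration (editorial note): __str__ re-assembles the normalized
+    # form, so e.g. str(Version("1.0.0-RC1")) == "1.0.0rc1".
+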
+    @property
+    def epoch(self) -> int:
+        _epoch: int = self._version.epoch
+        return _epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        _release: Tuple[int, ...] = self._version.release
+        return _release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        _pre: Optional[Tuple[str, int]] = self._version.pre
+        return _pre
+
+    @property
+    def post(self) -> Optional[int]:
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
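+# Example (editorial, illustrative only): how the parsed pieces surface
+# through the Version properties above:
+#
+#   v = Version("1!2.0.3rc1.post2.dev4+ubuntu.1")
+#   assert v.epoch == 1 and v.release == (2, 0, 3)
+#   assert v.pre == ("rc", 1) and v.post == 2 and v.dev == 4
+#   assert v.local == "ubuntu.1" and v.public == "1!2.0.3rc1.post2.dev4"
+#   assert v.base_version == "1!2.0.3" and (v.major, v.minor, v.micro) == (2, 0, 3)
+
+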
+def _parse_letter_version(
+    letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
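+# For illustration (editorial note): the spelling normalizations above make
+# variant forms compare equal once parsed:
+#
+#   assert Version("1.1alpha1") == Version("1.1a1")
+#   assert Version("1.0-rev2") == Version("1.0.post2")
+#   assert Version("1.0-1") == Version("1.0.post1")  # implicit post release
+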
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. To do so we reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # remainder back into the correct order, and use that tuple as our sorting
+    # key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: PrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: PrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: PrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: LocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alphanumeric segments sort before numeric segments
+        # - Alphanumeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
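+
+
+# Net effect of the key construction above (editorial, illustrative only):
+#
+#   assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
+#   assert Version("1.0") < Version("1.0+local")  # local versions sort after public ones
+#   assert Version("1.0") == Version("1.0.0")     # trailing zeros are dropped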
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 0000000..ad27940
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3361 @@
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+
+This module is deprecated. Users are directed to :mod:`importlib.resources`,
+:mod:`importlib.metadata` and :pypi:`packaging` instead.
+"""
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import inspect
+import ntpath
+import posixpath
+import importlib
+from pkgutil import get_importer
+
+try:
+    import _imp
+except ImportError:
+    # Python 3.2 compatibility
+    import imp as _imp
+
+try:
+    FileExistsError
+except NameError:
+    FileExistsError = OSError
+
+# capture these to bypass sandboxing
+from os import utime
+
+try:
+    from os import mkdir, rename, unlink
+
+    WRITE_SUPPORT = True
+except ImportError:
+    # no write support, probably under GAE
+    WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+    import importlib.machinery as importlib_machinery
+
+    # access attribute to force import under delayed import mechanisms.
+    importlib_machinery.__name__
+except ImportError:
+    importlib_machinery = None
+
+from pip._internal.utils._jaraco_text import (
+    yield_lines,
+    drop_comment,
+    join_continuation,
+)
+
+from pip._vendor import platformdirs
+from pip._vendor import packaging
+
+__import__('pip._vendor.packaging.version')
+__import__('pip._vendor.packaging.specifiers')
+__import__('pip._vendor.packaging.requirements')
+__import__('pip._vendor.packaging.markers')
+__import__('pip._vendor.packaging.utils')
+
+if sys.version_info < (3, 5):
+    raise RuntimeError("Python 3.5 or later is required")
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+add_activation_listener = None
+resources_stream = None
+cleanup_resources = None
+resource_dir = None
+resource_stream = None
+set_extraction_path = None
+resource_isdir = None
+resource_string = None
+iter_entry_points = None
+resource_listdir = None
+resource_filename = None
+resource_exists = None
+_distribution_finders = None
+_namespace_handlers = None
+_namespace_packages = None
+
+
+warnings.warn(
+    "pkg_resources is deprecated as an API. "
+    "See https://setuptools.pypa.io/en/latest/pkg_resources.html",
+    DeprecationWarning,
+    stacklevel=2
+)
+
+
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
+
+
+class PEP440Warning(RuntimeWarning):
+    """
+    Used when there is an issue with a version or specifier not complying with
+    PEP 440.
+    """
+
+
+parse_version = packaging.version.Version
+
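+# Editorial note: `parse_version` is now simply an alias for
+# `packaging.version.Version`, so invalid inputs raise
+# `packaging.version.InvalidVersion` rather than producing a legacy version.
+# For example (illustrative):
+#
+#   assert parse_version("1.4.2") < parse_version("1.10")  # numeric comparison
+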
+
+_state_vars = {}
+
+
+def _declare_state(vartype, **kw):
+    globals().update(kw)
+    _state_vars.update(dict.fromkeys(kw, vartype))
+
+
+def __getstate__():
+    state = {}
+    g = globals()
+    for k, v in _state_vars.items():
+        state[k] = g['_sget_' + v](g[k])
+    return state
+
+
+def __setstate__(state):
+    g = globals()
+    for k, v in state.items():
+        g['_sset_' + _state_vars[k]](k, g[k], v)
+    return state
+
+
+def _sget_dict(val):
+    return val.copy()
+
+
+def _sset_dict(key, ob, state):
+    ob.clear()
+    ob.update(state)
+
+
+def _sget_object(val):
+    return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+    ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of macOS that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of macOS that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of macOS, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform()
+    m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
+        except ValueError:
+            # not macOS
+            pass
+    return plat
+
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require',
+    'run_script',
+    'get_provider',
+    'get_distribution',
+    'load_entry_point',
+    'get_entry_map',
+    'get_entry_info',
+    'iter_entry_points',
+    'resource_string',
+    'resource_stream',
+    'resource_filename',
+    'resource_listdir',
+    'resource_exists',
+    'resource_isdir',
+    # Environmental control
+    'declare_namespace',
+    'working_set',
+    'add_activation_listener',
+    'find_distributions',
+    'set_extraction_path',
+    'cleanup_resources',
+    'get_default_cache',
+    # Primary implementation classes
+    'Environment',
+    'WorkingSet',
+    'ResourceManager',
+    'Distribution',
+    'Requirement',
+    'EntryPoint',
+    # Exceptions
+    'ResolutionError',
+    'VersionConflict',
+    'DistributionNotFound',
+    'UnknownExtra',
+    'ExtractionError',
+    # Warnings
+    'PEP440Warning',
+    # Parsing functions and string utilities
+    'parse_requirements',
+    'parse_version',
+    'safe_name',
+    'safe_version',
+    'get_platform',
+    'compatible_platforms',
+    'yield_lines',
+    'split_sections',
+    'safe_extra',
+    'to_filename',
+    'invalid_marker',
+    'evaluate_marker',
+    # filesystem utilities
+    'ensure_directory',
+    'normalize_path',
+    # Distribution "precedence" constants
+    'EGG_DIST',
+    'BINARY_DIST',
+    'SOURCE_DIST',
+    'CHECKOUT_DIST',
+    'DEVELOP_DIST',
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider',
+    'IResourceProvider',
+    'FileMetadata',
+    'PathMetadata',
+    'EggMetadata',
+    'EmptyProvider',
+    'empty_provider',
+    'NullProvider',
+    'EggProvider',
+    'DefaultProvider',
+    'ZipProvider',
+    'register_finder',
+    'register_namespace_handler',
+    'register_loader_type',
+    'fixup_namespace_packages',
+    'get_importer',
+    # Warnings
+    'PkgResourcesDeprecationWarning',
+    # Deprecated/backward compatibility only
+    'run_main',
+    'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+
+    def __repr__(self):
+        return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+    """
+    An already-installed version conflicts with the requested version.
+
+    Should be initialized with the installed Distribution and the requested
+    Requirement.
+    """
+
+    _template = "{self.dist} is installed but {self.req} is required"
+
+    @property
+    def dist(self):
+        return self.args[0]
+
+    @property
+    def req(self):
+        return self.args[1]
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def with_context(self, required_by):
+        """
+        If required_by is non-empty, return a version of self that is a
+        ContextualVersionConflict.
+        """
+        if not required_by:
+            return self
+        args = self.args + (required_by,)
+        return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+    """
+    A VersionConflict that accepts a third parameter, the set of the
+    requirements that required the installed Distribution.
+    """
+
+    _template = VersionConflict._template + ' by {self.required_by}'
+
+    @property
+    def required_by(self):
+        return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+    _template = (
+        "The '{self.req}' distribution was not found "
+        "and is required by {self.requirers_str}"
+    )
+
+    @property
+    def req(self):
+        return self.args[0]
+
+    @property
+    def requirers(self):
+        return self.args[1]
+
+    @property
+    def requirers_str(self):
+        if not self.requirers:
+            return 'the application'
+        return ', '.join(self.requirers)
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def __str__(self):
+        return self.report()
+
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+
+
+_provider_factories = {}
+
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
+
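+# Usage sketch (editorial, illustrative only): associate a custom PEP 302
+# loader type with one of the provider classes defined below, so modules
+# imported through it can serve resources (`MyCustomLoader` is hypothetical):
+#
+#   register_loader_type(MyCustomLoader, DefaultProvider)
+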
+
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq, Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+
+def _macos_vers(_cache=[]):
+    if not _cache:
+        version = platform.mac_ver()[0]
+        # fallback for MacPorts
+        if version == '':
+            plist = '/System/Library/CoreServices/SystemVersion.plist'
+            if os.path.exists(plist):
+                if hasattr(plistlib, 'readPlist'):
+                    plist_content = plistlib.readPlist(plist)
+                    if 'ProductVersion' in plist_content:
+                        version = plist_content['ProductVersion']
+
+        _cache.append(version.split('.'))
+    return _cache[0]
+
+
+def _macos_arch(machine):
+    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and macOS.
+    """
+    from sysconfig import get_platform
+
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macos_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (
+                int(version[0]),
+                int(version[1]),
+                _macos_arch(machine),
+            )
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided == required:
+        # easy case
+        return True
+
+    # macOS special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macOS designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if (
+                    dversion == 7
+                    and macosversion >= "10.3"
+                    or dversion == 8
+                    and macosversion >= "10.4"
+                ):
+                    return True
+            # egg isn't macOS or legacy darwin
+            return False
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
+            return False
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
+
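+# For example (editorial, illustrative only): a distribution built for an older
+# macOS minor release is usable on a newer one with the same major version and
+# machine type:
+#
+#   assert compatible_platforms("macosx-10.9-x86_64", "macosx-10.13-x86_64")
+#   assert not compatible_platforms("macosx-10.13-x86_64", "macosx-10.9-x86_64")
+#   assert compatible_platforms(None, "macosx-10.13-x86_64")  # None matches anything
+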
+
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+
+# backward compatibility
+run_main = run_script
+
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist, str):
+        dist = Requirement.parse(dist)
+    if isinstance(dist, Requirement):
+        dist = get_provider(dist)
+    if not isinstance(dist, Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
+
+
+class IMetadataProvider:
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+        Leading and trailing whitespace is stripped from each line, and lines
+        with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.normalized_to_canonical_keys = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+    @classmethod
+    def _build_master(cls):
+        """
+        Prepare the master working set.
+        """
+        ws = cls()
+        try:
+            from __main__ import __requires__
+        except ImportError:
+            # The main program does not list any requirements
+            return ws
+
+        # ensure the requirements are met
+        try:
+            ws.require(__requires__)
+        except VersionConflict:
+            return cls._build_from_requirements(__requires__)
+
+        return ws
+
+    @classmethod
+    def _build_from_requirements(cls, req_spec):
+        """
+        Build a working set from a requirement spec. Rewrites sys.path.
+        """
+        # try it without defaults already on sys.path
+        # by starting with an empty path
+        ws = cls([])
+        reqs = parse_requirements(req_spec)
+        dists = ws.resolve(reqs, Environment())
+        for dist in dists:
+            ws.add(dist)
+
+        # add any missing entries from sys.path
+        for entry in sys.path:
+            if entry not in ws.entries:
+                ws.add_entry(entry)
+
+        # then copy back to sys.path
+        sys.path[:] = ws.entries
+        return ws
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry, True)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+    def __contains__(self, dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+
+        if dist is None:
+            canonical_key = self.normalized_to_canonical_keys.get(req.key)
+
+            if canonical_key is not None:
+                req.key = canonical_key
+                dist = self.by_key.get(canonical_key)
+
+        if dist is not None and dist not in req:
+            # XXX add more info
+            raise VersionConflict(dist, req)
+        return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        return (
+            entry
+            for dist in self
+            for entry in dist.get_entry_map(group).values()
+            if name is None or name == entry.name
+        )
+
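+    # Usage sketch (editorial, illustrative; assumes an installed distribution
+    # advertising a "console_scripts" entry point named "my-tool"):
+    #
+    #   for ep in working_set.iter_entry_points("console_scripts", "my-tool"):
+    #       main = ep.load()
+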
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            if item not in self.entry_keys:
+                # workaround a cache issue
+                continue
+
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key] = 1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True, replace=False):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set, unless `replace=True`.
+        If it's added, any callbacks registered with the ``subscribe()`` method
+        will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry, replace=replace)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry, [])
+        keys2 = self.entry_keys.setdefault(dist.location, [])
+        if not replace and dist.key in self.by_key:
+            # ignore hidden distros
+            return
+
+        self.by_key[dist.key] = dist
+        normalized_name = packaging.utils.canonicalize_name(dist.key)
+        self.normalized_to_canonical_keys[normalized_name] = dist.key
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(
+        self,
+        requirements,
+        env=None,
+        installer=None,
+        replace_conflicting=False,
+        extras=None,
+    ):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+
+        Unless `replace_conflicting=True`, raises a VersionConflict exception
+        if any requirements are found on the path that have the correct name
+        but the wrong version.  Otherwise, if an `installer` is supplied it
+        will be invoked to obtain the correct version of the requirement and
+        activate it.
+
+        `extras` is a list of the extras to be used with these requirements.
+        This is important because extra requirements may look like `my_req;
+        extra = "my_extra"`, which would otherwise be interpreted as a purely
+        optional requirement.  Instead, we want to be able to assert that these
+        requirements are truly required.
+        """
+
+        # set up the stack
+        requirements = list(requirements)[::-1]
+        # set of processed requirements
+        processed = {}
+        # key -> dist
+        best = {}
+        to_activate = []
+
+        req_extras = _ReqExtras()
+
+        # Mapping of requirement to set of distributions that required it;
+        # useful for reporting info about conflicts.
+        required_by = collections.defaultdict(set)
+
+        while requirements:
+            # process dependencies breadth-first
+            req = requirements.pop(0)
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+
+            if not req_extras.markers_pass(req, extras):
+                continue
+
+            dist = self._resolve_dist(
+                req, best, replace_conflicting, env, installer, required_by, to_activate
+            )
+
+            # push the new requirements onto the stack
+            new_requirements = dist.requires(req.extras)[::-1]
+            requirements.extend(new_requirements)
+
+            # Register the new requirements needed by req
+            for new_requirement in new_requirements:
+                required_by[new_requirement].add(req.project_name)
+                req_extras[new_requirement] = req.extras
+
+            processed[req] = True
+
+        # return list of distros to activate
+        return to_activate
+
+    def _resolve_dist(
+        self, req, best, replace_conflicting, env, installer, required_by, to_activate
+    ):
+        dist = best.get(req.key)
+        if dist is None:
+            # Find the best distribution and add it to the map
+            dist = self.by_key.get(req.key)
+            if dist is None or (dist not in req and replace_conflicting):
+                ws = self
+                if env is None:
+                    if dist is None:
+                        env = Environment(self.entries)
+                    else:
+                        # Use an empty environment and workingset to avoid
+                        # any further conflicts with the conflicting
+                        # distribution
+                        env = Environment([])
+                        ws = WorkingSet([])
+                dist = best[req.key] = env.best_match(
+                    req, ws, installer, replace_conflicting=replace_conflicting
+                )
+                if dist is None:
+                    requirers = required_by.get(req, None)
+                    raise DistributionNotFound(req, requirers)
+            to_activate.append(dist)
+        if dist not in req:
+            # Oops, the "best" so far conflicts with a dependency
+            dependent_req = required_by[req]
+            raise VersionConflict(dist, req).with_context(dependent_req)
+        return dist
+
+    def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            # add plugins+libs to sys.path
+            map(working_set.add, distributions)
+            # display errors
+            print('Could not load', errors)
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions.  If `full_env` is not
+        supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        # scan project names in alphabetic order
+        plugin_projects.sort()
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        # put all our entries in shadow_set
+        list(map(shadow_set.add, self))
+
+        for project_name in plugin_projects:
+            for dist in plugin_env[project_name]:
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError as v:
+                    # save error info
+                    error_info[dist] = v
+                    if fallback:
+                        # try the next older version of project
+                        continue
+                    else:
+                        # give up on this project, keep going
+                        break
+
+                else:
+                    list(map(shadow_set.add, resolvees))
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
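+
+    # A minimal usage sketch (illustrative only; assumes a distribution
+    # named "packaging" is discoverable on sys.path):
+    #
+    #     ws = WorkingSet()
+    #     for dist in ws.require('packaging'):
+    #         print(dist.project_name, dist.version)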
+
+    def subscribe(self, callback, existing=True):
+        """Invoke `callback` for all distributions
+
+        If `existing=True` (default),
+        call on all existing ones, as well.
+        """
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        if not existing:
+            return
+        for dist in self:
+            callback(dist)
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+    def __getstate__(self):
+        return (
+            self.entries[:],
+            self.entry_keys.copy(),
+            self.by_key.copy(),
+            self.normalized_to_canonical_keys.copy(),
+            self.callbacks[:],
+        )
+
+    def __setstate__(self, e_k_b_n_c):
+        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
+        self.entries = entries[:]
+        self.entry_keys = keys.copy()
+        self.by_key = by_key.copy()
+        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
+        self.callbacks = callbacks[:]
+
+
+class _ReqExtras(dict):
+    """
+    Map each requirement to the extras that demanded it.
+    """
+
+    def markers_pass(self, req, extras=None):
+        """
+        Evaluate markers for req against each extra that
+        demanded it.
+
+        Return False if the req has a marker and fails
+        evaluation. Otherwise, return True.
+        """
+        extra_evals = (
+            req.marker.evaluate({'extra': extra})
+            for extra in self.get(req, ()) + (extras or (None,))
+        )
+        return not req.marker or any(extra_evals)
+
+
+class Environment:
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(
+        self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+    ):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'3.6'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and Python version
+        requirements specified when this environment was created;
+        otherwise, False is returned.
+        """
+        py_compat = (
+            self.python is None
+            or dist.py_version is None
+            or dist.py_version == self.python
+        )
+        return py_compat and compatible_platforms(dist.platform, self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self, project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+
+        Uses case-insensitive `project_name` comparison, assuming all the
+        project's distributions use their project's name converted to all
+        lowercase as their key.
+
+        """
+        distribution_key = project_name.lower()
+        return self._distmap.get(distribution_key, [])
+
+    def add(self, dist):
+        """Add `dist` if we ``can_add()`` it and it has not already been added"""
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key, [])
+            if dist not in dists:
+                dists.append(dist)
+                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+    def best_match(self, req, working_set, installer=None, replace_conflicting=False):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        try:
+            dist = working_set.find(req)
+        except VersionConflict:
+            if not replace_conflicting:
+                raise
+            dist = None
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        # try to download/install
+        return self.obtain(req, installer)
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]:
+                yield key
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other, Distribution):
+            self.add(other)
+        elif isinstance(other, Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
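+# A minimal usage sketch for ``Environment`` (illustrative only; the
+# './plugins' directory is hypothetical):
+#
+#     plugin_env = Environment(['./plugins'])
+#     for project in plugin_env:
+#         newest = plugin_env[project][0]  # entries are newest-to-oldest
+#         print(project, newest.version)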
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(resource_name)
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(resource_name)
+
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        tmpl = textwrap.dedent(
+            """
+            Can't extract file(s) to egg cache
+
+            The following error occurred while trying to extract file(s)
+            to the Python egg cache:
+
+              {old_exc}
+
+            The Python egg cache directory is currently set to:
+
+              {cache_path}
+
+            Perhaps your account does not have write access to this directory?
+            You can change the cache directory by setting the PYTHON_EGG_CACHE
+            environment variable to point to an accessible directory.
+            """
+        ).lstrip()
+        err = ExtractionError(tmpl.format(**locals()))
+        err.manager = self
+        err.cache_path = cache_path
+        err.original_error = old_exc
+        raise err
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
+        try:
+            _bypass_ensure_directory(target_path)
+        except Exception:
+            self.extraction_error()
+
+        self._warn_unsafe_extraction_path(extract_path)
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+    @staticmethod
+    def _warn_unsafe_extraction_path(path):
+        """
+        If the default extraction path is overridden and set to an insecure
+        location, such as /tmp, it opens up an opportunity for an attacker to
+        replace an extracted file with an unauthorized payload. Warn the user
+        if a known insecure location is used.
+
+        See Distribute #375 for more details.
+        """
+        if os.name == 'nt' and not path.startswith(os.environ['windir']):
+            # On Windows, permissions are generally restrictive by default
+            #  and temp directories are not writable by other users, so
+            #  bypass the warning.
+            return
+        mode = os.stat(path).st_mode
+        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+            msg = (
+                "Extraction path is writable by group/others "
+                "and vulnerable to attack when "
+                "used with get_resource_filename ({path}). "
+                "Consider a more secure "
+                "location (set with .set_extraction_path or the "
+                "PYTHON_EGG_CACHE environment variable)."
+            ).format(**locals())
+            warnings.warn(msg, UserWarning)
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+
+        if os.name == 'posix':
+            # Make the resource executable
+            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+            os.chmod(tempname, mode)
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError("Can't change extraction path, files already extracted")
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+def get_default_cache():
+    """
+    Return the ``PYTHON_EGG_CACHE`` environment variable
+    or a platform-relevant user cache dir for an app
+    named "Python-Eggs".
+    """
+    return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
+        appname='Python-Eggs'
+    )
+
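+# Illustration (not normative): on Linux, with PYTHON_EGG_CACHE unset, this
+# typically returns the expanded form of '~/.cache/Python-Eggs'.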
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of characters other than letters, digits, and '.' are
+    replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
+        version = version.replace(' ', '.')
+        return re.sub('[^A-Za-z0-9.]+', '-', version)
+
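+# Illustrative examples (values traced by hand, not normative doctests):
+#
+#     safe_name('foo bar#baz')     -> 'foo-bar-baz'
+#     safe_version('2.1-rc1')      -> '2.1rc1'       (PEP 440 normalization)
+#     safe_version('0.23ubuntu1')  -> '0.23ubuntu1'  (fallback path)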
+
+def _forgiving_version(version):
+    """Fallback when ``safe_version`` is not safe enough
+    >>> parse_version(_forgiving_version('0.23ubuntu1'))
+    <Version('0.23.dev0+sanitized.ubuntu1')>
+    >>> parse_version(_forgiving_version('0.23-'))
+    <Version('0.23.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('0.-_'))
+    <Version('0.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('42.+?1'))
+    <Version('42.dev0+sanitized.1')>
+    >>> parse_version(_forgiving_version('hello world'))
+    <Version('0.dev0+sanitized.hello.world')>
+    """
+    version = version.replace(' ', '.')
+    match = _PEP440_FALLBACK.search(version)
+    if match:
+        safe = match["safe"]
+        rest = version[len(safe):]
+    else:
+        safe = "0"
+        rest = version
+    local = f"sanitized.{_safe_segment(rest)}".strip(".")
+    return f"{safe}.dev0+{local}"
+
+
+def _safe_segment(segment):
+    """Convert an arbitrary string into a safe segment"""
+    segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
+    segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
+    return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of characters other than letters, digits, '.', and '-' are
+    replaced with a single '_', and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-', '_')
+
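+# Illustrative examples (values traced by hand, not normative doctests):
+#
+#     safe_extra('dev tools')    -> 'dev_tools'
+#     to_filename('my-package')  -> 'my_package'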
+
+def invalid_marker(text):
+    """
+    Validate text as a PEP 508 environment marker; return an exception
+    if invalid or False otherwise.
+    """
+    try:
+        evaluate_marker(text)
+    except SyntaxError as e:
+        e.filename = None
+        e.lineno = None
+        return e
+    return False
+
+
+def evaluate_marker(text, extra=None):
+    """
+    Evaluate a PEP 508 environment marker.
+    Return a boolean indicating the marker result in this environment.
+    Raise SyntaxError if marker is invalid.
+
+    This implementation uses the 'pyparsing' module.
+    """
+    try:
+        marker = packaging.markers.Marker(text)
+        return marker.evaluate()
+    except packaging.markers.InvalidMarker as e:
+        raise SyntaxError(e) from e
+
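+# A minimal sketch of marker handling (illustrative only):
+#
+#     evaluate_marker('python_version >= "3.8"')  # True on Python 3.8+
+#     invalid_marker('python_version >=')         # returns a SyntaxError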
+
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def _get_metadata_path(self, name):
+        return self._fn(self.egg_info, name)
+
+    def has_metadata(self, name):
+        if not self.egg_info:
+            return self.egg_info
+
+        path = self._get_metadata_path(name)
+        return self._has(path)
+
+    def get_metadata(self, name):
+        if not self.egg_info:
+            return ""
+        path = self._get_metadata_path(name)
+        value = self._get(path)
+        try:
+            return value.decode('utf-8')
+        except UnicodeDecodeError as exc:
+            # Include the path in the error message to simplify
+            # troubleshooting, and without changing the exception type.
+            exc.reason += ' in {} file at path: {}'.format(name, path)
+            raise
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self, resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self, name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
+
+    def resource_listdir(self, resource_name):
+        return self._listdir(self._fn(self.module_path, resource_name))
+
+    def metadata_listdir(self, name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info, name))
+        return []
+
+    def run_script(self, script_name, namespace):
+        script = 'scripts/' + script_name
+        if not self.has_metadata(script):
+            raise ResolutionError(
+                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
+                    **locals()
+                ),
+            )
+        script_text = self.get_metadata(script).replace('\r\n', '\n')
+        script_text = script_text.replace('\r', '\n')
+        script_filename = self._fn(self.egg_info, script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            with open(script_filename) as fid:
+                source = fid.read()
+            code = compile(source, script_filename, 'exec')
+            exec(code, namespace, namespace)
+        else:
+            from linecache import cache
+
+            cache[script_filename] = (
+                len(script_text),
+                0,
+                script_text.split('\n'),
+                script_filename,
+            )
+            script_code = compile(script_text, script_filename, 'exec')
+            exec(script_code, namespace, namespace)
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        self._validate_resource_path(resource_name)
+        if resource_name:
+            return os.path.join(base, *resource_name.split('/'))
+        return base
+
+    @staticmethod
+    def _validate_resource_path(path):
+        """
+        Validate the resource paths according to the docs.
+        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
+
+        >>> warned = getfixture('recwarn')
+        >>> warnings.simplefilter('always')
+        >>> vrp = NullProvider._validate_resource_path
+        >>> vrp('foo/bar.txt')
+        >>> bool(warned)
+        False
+        >>> vrp('../foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('/foo/bar.txt')
+        >>> bool(warned)
+        True
+        >>> vrp('foo/../../bar.txt')
+        >>> bool(warned)
+        True
+        >>> warned.clear()
+        >>> vrp('foo/f../bar.txt')
+        >>> bool(warned)
+        False
+
+        Windows path separators are straight-up disallowed.
+        >>> vrp(r'\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        >>> vrp(r'C:\\foo/bar.txt')
+        Traceback (most recent call last):
+        ...
+        ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+        Blank values are allowed
+
+        >>> vrp('')
+        >>> bool(warned)
+        False
+
+        Non-string values are not.
+
+        >>> vrp(None)
+        Traceback (most recent call last):
+        ...
+        AttributeError: ...
+        """
+        invalid = (
+            os.path.pardir in path.split(posixpath.sep)
+            or posixpath.isabs(path)
+            or ntpath.isabs(path)
+        )
+        if not invalid:
+            return
+
+        msg = "Use of .. or absolute path in a resource path is not allowed."
+
+        # Aggressively disallow Windows absolute paths
+        if ntpath.isabs(path) and not posixpath.isabs(path):
+            raise ValueError(msg)
+
+        # for compatibility, warn; in future
+        # raise ValueError(msg)
+        issue_warning(
+            msg[:-1] + " and will raise exceptions in a future release.",
+            DeprecationWarning,
+        )
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+
+register_loader_type(object, NullProvider)
+
+
+def _parents(path):
+    """
+    yield all parents of path including path
+    """
+    last = None
+    while path != last:
+        yield path
+        last = path
+        path, _ = os.path.split(path)
+
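+# Illustration (POSIX-style paths):
+#
+#     list(_parents('/a/b/c')) -> ['/a/b/c', '/a/b', '/a', '/']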
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self, module):
+        super().__init__(module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # Assume that metadata may be nested inside a "basket"
+        # of multiple eggs and use module_path instead of .archive.
+        eggs = filter(_is_egg_path, _parents(self.module_path))
+        egg = next(eggs, None)
+        egg and self._set_egg(egg)
+
+    def _set_egg(self, path):
+        self.egg_name = os.path.basename(path)
+        self.egg_info = os.path.join(path, 'EGG-INFO')
+        self.egg_root = path
+
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self, path):
+        return os.path.isdir(path)
+
+    def _listdir(self, path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        with open(path, 'rb') as stream:
+            return stream.read()
+
+    @classmethod
+    def _register(cls):
+        loader_names = (
+            'SourceFileLoader',
+            'SourcelessFileLoader',
+        )
+        for name in loader_names:
+            loader_cls = getattr(importlib_machinery, name, type(None))
+            register_loader_type(loader_cls, cls)
+
+
+DefaultProvider._register()
+
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    module_path = None
+
+    _isdir = _has = lambda self, path: False
+
+    def _get(self, path):
+        return ''
+
+    def _listdir(self, path):
+        return []
+
+    def __init__(self):
+        pass
+
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(dict):
+    """
+    zip manifest builder
+    """
+
+    @classmethod
+    def build(cls, path):
+        """
+        Build a dictionary similar to the zipimport directory
+        caches, except instead of tuples, store ZipInfo objects.
+
+        Use a platform-specific path separator (os.sep) for the path keys
+        for compatibility with pypy on Windows.
+        """
+        with zipfile.ZipFile(path) as zfile:
+            items = (
+                (
+                    name.replace('/', os.sep),
+                    zfile.getinfo(name),
+                )
+                for name in zfile.namelist()
+            )
+            return dict(items)
+
+    load = build
+
+
+class MemoizedZipManifests(ZipManifests):
+    """
+    Memoized zipfile manifests.
+    """
+
+    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+
+    def load(self, path):
+        """
+        Load a manifest at path or return a suitable manifest already loaded.
+        """
+        path = os.path.normpath(path)
+        mtime = os.stat(path).st_mtime
+
+        if path not in self or self[path].mtime != mtime:
+            manifest = self.build(path)
+            self[path] = self.manifest_mod(manifest, mtime)
+
+        return self[path].manifest
+
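+# A minimal usage sketch (illustrative; the egg path is hypothetical):
+#
+#     manifests = MemoizedZipManifests()
+#     manifest = manifests.load('/path/to/FooPkg-1.2-py3.9.egg')
+#     # maps os.sep-separated archive paths to ZipInfo objects; the
+#     # manifest is rebuilt only when the file's mtime changes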
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+    _zip_manifests = MemoizedZipManifests()
+
+    def __init__(self, module):
+        super().__init__(module)
+        self.zip_pre = self.loader.archive + os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        fspath = fspath.rstrip(os.sep)
+        if fspath == self.loader.archive:
+            return ''
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre) :]
+        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
+
+    def _parts(self, zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list.
+        # pseudo-fs path
+        fspath = self.zip_pre + zip_path
+        if fspath.startswith(self.egg_root + os.sep):
+            return fspath[len(self.egg_root) + 1 :].split(os.sep)
+        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
+
+    @property
+    def zipinfo(self):
+        return self._zip_manifests.load(self.loader.archive)
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    @staticmethod
+    def _get_date_and_size(zip_stat):
+        size = zip_stat.file_size
+        # ymdhms+wday, yday, dst
+        date_time = zip_stat.date_time + (0, 0, -1)
+        # 1980 offset already done
+        timestamp = time.mktime(date_time)
+        return timestamp, size
+
+    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+    def _extract_resource(self, manager, zip_path):  # noqa: C901
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(manager, os.path.join(zip_path, name))
+            # return the extracted directory name
+            return os.path.dirname(last)
+
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+        if not WRITE_SUPPORT:
+            raise IOError(
+                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
+            )
+        try:
+            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
+
+            if self._is_current(real_path, zip_path):
+                return real_path
+
+            outf, tmpnam = _mkstemp(
+                ".$extract",
+                dir=os.path.dirname(real_path),
+            )
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp, timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    if self._is_current(real_path, zip_path):
+                        # the file became current since it was checked above,
+                        #  so proceed.
+                        return real_path
+                    # Windows, del old file and retry
+                    elif os.name == 'nt':
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            # report a user-friendly error
+            manager.extraction_error()
+
+        return real_path
+
+    def _is_current(self, file_path, zip_path):
+        """
+        Return True if the file_path is current for this zip_path
+        """
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+        if not os.path.isfile(file_path):
+            return False
+        stat = os.stat(file_path)
+        if stat.st_size != size or stat.st_mtime != timestamp:
+            return False
+        # check that the contents match
+        zip_contents = self.loader.get_data(zip_path)
+        with open(file_path, 'rb') as f:
+            file_contents = f.read()
+        return zip_contents == file_contents
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self, fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self, fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+    def _resource_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing, and will be the contents of the file at
+    the provided location.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def _get_metadata_path(self, name):
+        return self.path
+
+    def has_metadata(self, name):
+        return name == 'PKG-INFO' and os.path.isfile(self.path)
+
+    def get_metadata(self, name):
+        if name != 'PKG-INFO':
+            raise KeyError("No metadata except PKG-INFO is available")
+
+        with io.open(self.path, encoding='utf-8', errors="replace") as f:
+            metadata = f.read()
+        self._warn_on_replacement(metadata)
+        return metadata
+
+    def _warn_on_replacement(self, metadata):
+        replacement_char = '�'
+        if replacement_char in metadata:
+            tmpl = "{self.path} could not be properly decoded in UTF-8"
+            msg = tmpl.format(**locals())
+            warnings.warn(msg)
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zip_pre = importer.archive + os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+_declare_state('dict', _distribution_finders={})
+
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
+
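+# A minimal usage sketch (illustrative only; the path is hypothetical):
+#
+#     for dist in find_distributions('/usr/lib/python3/dist-packages'):
+#         print(dist.project_name, dist.version)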
+
+def find_eggs_in_zip(importer, path_item, only=False):
+    """
+    Find eggs in zip files; possibly multiple nested eggs.
+    """
+    if importer.archive.endswith('.whl'):
+        # wheels are not supported with this finder
+        # they don't have PKG-INFO metadata, and won't ever contain eggs
+        return
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        # don't yield nested distros
+        return
+    for subitem in metadata.resource_listdir(''):
+        if _is_egg_path(subitem):
+            subpath = os.path.join(path_item, subitem)
+            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
+            for dist in dists:
+                yield dist
+        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
+            subpath = os.path.join(path_item, subitem)
+            submeta = EggMetadata(zipimport.zipimporter(subpath))
+            submeta.egg_info = subpath
+            yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+
+
+register_finder(object, find_nothing)
+
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if _is_unpacked_egg(path_item):
+        yield Distribution.from_filename(
+            path_item,
+            metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
+        )
+        return
+
+    entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))
+
+    # scan for .egg and .egg-info in directory
+    for entry in sorted(entries):
+        fullpath = os.path.join(path_item, entry)
+        factory = dist_factory(path_item, entry, only)
+        for dist in factory(fullpath):
+            yield dist
+
+
+def dist_factory(path_item, entry, only):
+    """Return a dist_factory for the given entry."""
+    lower = entry.lower()
+    is_egg_info = lower.endswith('.egg-info')
+    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
+        os.path.join(path_item, entry)
+    )
+    is_meta = is_egg_info or is_dist_info
+    return (
+        distributions_from_metadata
+        if is_meta
+        else find_distributions
+        if not only and _is_egg_path(entry)
+        else resolve_egg_link
+        if not only and lower.endswith('.egg-link')
+        else NoDists()
+    )
+
+
+class NoDists:
+    """
+    >>> bool(NoDists())
+    False
+
+    >>> list(NoDists()('anything'))
+    []
+    """
+
+    def __bool__(self):
+        return False
+
+    def __call__(self, fullpath):
+        return iter(())
+
+
+def safe_listdir(path):
+    """
+    Attempt to list contents of path, but suppress some exceptions.
+    """
+    try:
+        return os.listdir(path)
+    except (PermissionError, NotADirectoryError):
+        pass
+    except OSError as e:
+        # Ignore the directory if it does not exist, is not a directory,
+        # or permission is denied
+        if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
+            raise
+    return ()
+
+
+def distributions_from_metadata(path):
+    root = os.path.dirname(path)
+    if os.path.isdir(path):
+        if len(os.listdir(path)) == 0:
+            # empty metadata dir; skip
+            return
+        metadata = PathMetadata(root, path)
+    else:
+        metadata = FileMetadata(path)
+    entry = os.path.basename(path)
+    yield Distribution.from_location(
+        root,
+        entry,
+        metadata,
+        precedence=DEVELOP_DIST,
+    )
+
+
+def non_empty_lines(path):
+    """
+    Yield non-empty lines from file at path
+    """
+    with open(path) as f:
+        for line in f:
+            line = line.strip()
+            if line:
+                yield line
+
+
+def resolve_egg_link(path):
+    """
+    Given a path to an .egg-link, resolve distributions
+    present in the referenced path.
+    """
+    referenced_paths = non_empty_lines(path)
+    resolved_paths = (
+        os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
+    )
+    dist_groups = map(find_distributions, resolved_paths)
+    return next(dist_groups, ())
+
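+# For reference: an .egg-link file holds the path of a development checkout
+# on its first line, optionally followed by a relative subpath, e.g.:
+#
+#     /home/user/src/mypkg
+#     .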
+
+if hasattr(pkgutil, 'ImpImporter'):
+    register_finder(pkgutil.ImpImporter, find_on_path)
+
+register_finder(importlib_machinery.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer, path_entry, moduleName, module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
+
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+
+    # use find_spec (PEP 451) and fall back to find_module (PEP 302)
+    try:
+        spec = importer.find_spec(packageName)
+    except AttributeError:
+        # capture warnings due to #1111
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            loader = importer.find_module(packageName)
+    else:
+        loader = spec.loader if spec else None
+
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = types.ModuleType(packageName)
+        module.__path__ = []
+        _set_parent_ns(packageName)
+    elif not hasattr(module, '__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer, path_item, packageName, module)
+    if subpath is not None:
+        path = module.__path__
+        path.append(subpath)
+        importlib.import_module(packageName)
+        _rebuild_mod_path(path, packageName, module)
+    return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module):
+    """
+    Rebuild module.__path__ ensuring that all entries are ordered
+    corresponding to their sys.path order
+    """
+    sys_path = [_normalize_cached(p) for p in sys.path]
+
+    def safe_sys_path_index(entry):
+        """
+        Workaround for #520 and #513.
+        """
+        try:
+            return sys_path.index(entry)
+        except ValueError:
+            return float('inf')
+
+    def position_in_sys_path(path):
+        """
+        Return the ordinal of the path based on its position in sys.path
+        """
+        path_parts = path.split(os.sep)
+        module_parts = package_name.count('.') + 1
+        parts = path_parts[:-module_parts]
+        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
+
+    new_path = sorted(orig_path, key=position_in_sys_path)
+    new_path = [_normalize_cached(p) for p in new_path]
+
+    if isinstance(module.__path__, list):
+        module.__path__[:] = new_path
+    else:
+        module.__path__ = new_path
+
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    msg = (
+        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
+        "Implementing implicit namespace packages (as specified in PEP 420) "
+        "is preferred to `pkg_resources.declare_namespace`. "
+        "See https://setuptools.pypa.io/en/latest/references/"
+        "keywords.html#keyword-namespace-packages"
+    )
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
+    _imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path = sys.path
+        parent, _, _ = packageName.rpartition('.')
+
+        if parent:
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError as e:
+                raise TypeError("Not a package:", parent) from e
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent or None, []).append(packageName)
+        _namespace_packages.setdefault(packageName, [])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        _imp.release_lock()
+
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    _imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent, ()):
+            subpath = _handle_ns(package, path_item)
+            if subpath:
+                fixup_namespace_packages(subpath, package)
+    finally:
+        _imp.release_lock()
+
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item) == normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+
+if hasattr(pkgutil, 'ImpImporter'):
+    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(
+        os.path.realpath(os.path.normpath(_cygwin_patch(filename)))
+    )
+
+
+def _cygwin_patch(filename):  # pragma: nocover
+    """
+    Contrary to POSIX 2008, on Cygwin, getcwd(3) contains
+    symlink components. Using os.path.abspath() works around this
+    limitation. A fix in os.getcwd() would probably be better, on Cygwin
+    even more so, except that this seems to be by design...
+    """
+    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+def _normalize_cached(filename, _cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
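+# Illustration: both spellings below normalize to the same key, so duplicate
+# sys.path entries compare equal:
+#
+#     normalize_path('src/../setup.py') == normalize_path('setup.py')  # True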
+
+def _is_egg_path(path):
+    """
+    Determine if given path appears to be an egg.
+    """
+    return _is_zip_egg(path) or _is_unpacked_egg(path)
+
+
+def _is_zip_egg(path):
+    return (
+        path.lower().endswith('.egg')
+        and os.path.isfile(path)
+        and zipfile.is_zipfile(path)
+    )
+
+
+def _is_unpacked_egg(path):
+    """
+    Determine if given path appears to be an unpacked egg.
+    """
+    return path.lower().endswith('.egg') and os.path.isfile(
+        os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    )
+
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"""
+    (?P<name>[^-]+) (
+        -(?P<ver>[^-]+) (
+            -py(?P<pyver>[^-]+) (
+                -(?P<plat>.+)
+            )?
+        )?
+    )?
+    """,
+    re.VERBOSE | re.IGNORECASE,
+).match
+
+
+class EntryPoint:
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = tuple(extras)
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, *args, **kwargs):
+        """
+        Require packages for this EntryPoint, then resolve it.
+        """
+        if not require or args or kwargs:
+            warnings.warn(
+                "Parameters to load are deprecated.  Call .resolve and "
+                ".require separately.",
+                PkgResourcesDeprecationWarning,
+                stacklevel=2,
+            )
+        if require:
+            self.require(*args, **kwargs)
+        return self.resolve()
+
+    def resolve(self):
+        """
+        Resolve the entry point from its module and attrs.
+        """
+        module = __import__(self.module_name, fromlist=['__name__'], level=0)
+        try:
+            return functools.reduce(getattr, self.attrs, module)
+        except AttributeError as exc:
+            raise ImportError(str(exc)) from exc
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+
+        # Get the requirements for this entry point with all its extras and
+        # then resolve them. We have to pass `extras` along when resolving so
+        # that the working set knows what extras we want. Otherwise, for
+        # dist-info distributions, the working set will assume that the
+        # requirements for that extra are purely optional and skip over them.
+        reqs = self.dist.requires(self.extras)
+        items = working_set.resolve(reqs, env, installer, extras=self.extras)
+        list(map(working_set.add, items))
+
+    pattern = re.compile(
+        r'\s*'
+        r'(?P<name>.+?)\s*'
+        r'=\s*'
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+))?\s*'
+        r'(?P<extras>\[.*\])?\s*$'
+    )
+
+    @classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1, extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional
+        """
+        m = cls.pattern.match(src)
+        if not m:
+            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+            raise ValueError(msg, src)
+        res = m.groupdict()
+        extras = cls._parse_extras(res['extras'])
+        attrs = res['attr'].split('.') if res['attr'] else ()
+        return cls(res['name'], res['module'], attrs, extras, dist)
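+
+    # An illustrative sketch of ``parse`` (the names are hypothetical):
+    #
+    #     ep = EntryPoint.parse('main = mypkg.cli:run [dev]')
+    #     ep.name         -> 'main'
+    #     ep.module_name  -> 'mypkg.cli'
+    #     ep.attrs        -> ('run',)
+    #     ep.extras       -> ('dev',)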
+
+    @classmethod
+    def _parse_extras(cls, extras_spec):
+        if not extras_spec:
+            return ()
+        req = Requirement.parse('x' + extras_spec)
+        if req.specs:
+            raise ValueError()
+        return req.extras
+
+    @classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name] = ep
+        return this
+
+    @classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data, dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
+
+
+def _version_from_file(lines):
+    """
+    Given an iterable of lines from a Metadata file, return
+    the value of the Version field, if present, or None otherwise.
+    """
+
+    def is_version_line(line):
+        return line.lower().startswith('version:')
+
+    version_lines = filter(is_version_line, lines)
+    line = next(iter(version_lines), '')
+    _, _, value = line.partition(':')
+    return safe_version(value.strip()) or None
+
+
+class Distribution:
+    """Wrap an actual or potential sys.path entry w/metadata"""
+
+    PKG_INFO = 'PKG-INFO'
+
+    def __init__(
+        self,
+        location=None,
+        metadata=None,
+        project_name=None,
+        version=None,
+        py_version=PY_MAJOR,
+        platform=None,
+        precedence=EGG_DIST,
+    ):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    @classmethod
+    def from_location(cls, location, basename, metadata=None, **kw):
+        project_name, version, py_version, platform = [None] * 4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in _distributionImpl:
+            cls = _distributionImpl[ext.lower()]
+
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name', 'ver', 'pyver', 'plat'
+                )
+        return cls(
+            location,
+            metadata,
+            project_name=project_name,
+            version=version,
+            py_version=py_version,
+            platform=platform,
+            **kw,
+        )._reload_version()
+
+    def _reload_version(self):
+        return self
+
+    @property
+    def hashcmp(self):
+        return (
+            self._forgiving_parsed_version,
+            self.precedence,
+            self.key,
+            self.location,
+            self.py_version or '',
+            self.platform or '',
+        )
+
+    def __hash__(self):
+        return hash(self.hashcmp)
+
+    def __lt__(self, other):
+        return self.hashcmp < other.hashcmp
+
+    def __le__(self, other):
+        return self.hashcmp <= other.hashcmp
+
+    def __gt__(self, other):
+        return self.hashcmp > other.hashcmp
+
+    def __ge__(self, other):
+        return self.hashcmp >= other.hashcmp
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            # It's not a Distribution, so they are not equal
+            return False
+        return self.hashcmp == other.hashcmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    @property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+
+    @property
+    def parsed_version(self):
+        if not hasattr(self, "_parsed_version"):
+            try:
+                self._parsed_version = parse_version(self.version)
+            except packaging.version.InvalidVersion as ex:
+                info = f"(package: {self.project_name})"
+                if hasattr(ex, "add_note"):
+                    ex.add_note(info)  # PEP 678
+                    raise
+                raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
+
+        return self._parsed_version
+
+    @property
+    def _forgiving_parsed_version(self):
+        try:
+            return self.parsed_version
+        except packaging.version.InvalidVersion as ex:
+            self._parsed_version = parse_version(_forgiving_version(self.version))
+
+            notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
+            msg = f"""!!\n\n
+            *************************************************************************
+            {str(ex)}\n{notes}
+
+            This is a long overdue deprecation.
+            For the time being, `pkg_resources` will use `{self._parsed_version}`
+            as a replacement to avoid breaking existing environments,
+            but no future compatibility is guaranteed.
+
+            If you maintain package {self.project_name} you should implement
+            the relevant changes to make the project compatible with PEP 440 immediately.
+            *************************************************************************
+            \n\n!!
+            """
+            warnings.warn(msg, DeprecationWarning)
+
+            return self._parsed_version
+
+    @property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError as e:
+            version = self._get_version()
+            if version is None:
+                path = self._get_metadata_path_for_display(self.PKG_INFO)
+                msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
+                    self.PKG_INFO, path
+                )
+                raise ValueError(msg, self) from e
+
+            return version
+
+    @property
+    def _dep_map(self):
+        """
+        A map of extra to its list of (direct) requirements
+        for this distribution, including the null extra.
+        """
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._filter_extras(self._build_dep_map())
+        return self.__dep_map
+
+    @staticmethod
+    def _filter_extras(dm):
+        """
+        Given a mapping of extras to dependencies, strip off
+        environment markers and filter out any dependencies
+        not matching the markers.
+        """
+        for extra in list(filter(None, dm)):
+            new_extra = extra
+            reqs = dm.pop(extra)
+            new_extra, _, marker = extra.partition(':')
+            fails_marker = marker and (
+                invalid_marker(marker) or not evaluate_marker(marker)
+            )
+            if fails_marker:
+                reqs = []
+            new_extra = safe_extra(new_extra) or None
+
+            dm.setdefault(new_extra, []).extend(reqs)
+        return dm
+
+    def _build_dep_map(self):
+        dm = {}
+        for name in 'requires.txt', 'depends.txt':
+            for extra, reqs in split_sections(self._get_metadata(name)):
+                dm.setdefault(extra, []).extend(parse_requirements(reqs))
+        return dm
+
+    def requires(self, extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None, ()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError as e:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                ) from e
+        return deps
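+    # Illustrative sketch (not part of the vendored source): the null extra's
+    # requirements always apply; named extras add theirs on top.
+    #
+    #     dist.requires()            -> base requirements only
+    #     dist.requires(['tests'])   -> base + the 'tests' extra,
+    #                                   or UnknownExtra if it doesn't exist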
+
+    def _get_metadata_path_for_display(self, name):
+        """
+        Return the path to the given metadata file, if available.
+        """
+        try:
+            # We need to access _get_metadata_path() on the provider object
+            # directly rather than through this class's __getattr__()
+            # since _get_metadata_path() is marked private.
+            path = self._provider._get_metadata_path(name)
+
+        # Handle exceptions e.g. in case the distribution's metadata
+        # provider doesn't support _get_metadata_path().
+        except Exception:
+            return '[could not detect]'
+
+        return path
+
+    def _get_metadata(self, name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def _get_version(self):
+        lines = self._get_metadata(self.PKG_INFO)
+        version = _version_from_file(lines)
+
+        return version
+
+    def activate(self, path=None, replace=False):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None:
+            path = sys.path
+        self.insert_on(path, replace=replace)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            for pkg in self._get_metadata('namespace_packages.txt'):
+                if pkg in sys.modules:
+                    declare_namespace(pkg)
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name),
+            to_filename(self.version),
+            self.py_version or PY_MAJOR,
+        )
+
+        if self.platform:
+            filename += '-' + self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self, self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try:
+            version = getattr(self, 'version', None)
+        except ValueError:
+            version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name, version)
+
+    def __getattr__(self, attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError(attr)
+        return getattr(self._provider, attr)
+
+    def __dir__(self):
+        return list(
+            set(super(Distribution, self).__dir__())
+            | set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
+        )
+
+    @classmethod
+    def from_filename(cls, filename, metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata, **kw
+        )
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        if isinstance(self.parsed_version, packaging.version.Version):
+            spec = "%s==%s" % (self.project_name, self.parsed_version)
+        else:
+            spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+        return Requirement.parse(spec)
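+    # Illustrative sketch (hypothetical project, not part of the vendored
+    # source): a PEP 440 version pins with '==', while a legacy version
+    # falls back to the arbitrary-equality operator '==='.
+    #
+    #     Distribution(project_name='foo', version='1.2').as_requirement()
+    #     -> Requirement.parse('foo==1.2')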
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group, name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group, name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group, {})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
+
+    # FIXME: 'Distribution.insert_on' is too complex (13)
+    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
+        """Ensure self.location is on path
+
+        If replace=False (default):
+            - If location is already in path anywhere, do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent.
+              - Else: add to the end of path.
+        If replace=True:
+            - If location is already on path anywhere (not eggs)
+              or higher priority than its parent (eggs)
+              do nothing.
+            - Else:
+              - If it's an egg and its parent directory is on path,
+                insert just ahead of the parent,
+                removing any lower-priority entries.
+              - Else: add it to the front of path.
+        """
+
+        loc = loc or self.location
+        if not loc:
+            return
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath = [(p and _normalize_cached(p) or p) for p in path]
+
+        for p, item in enumerate(npath):
+            if item == nloc:
+                if replace:
+                    break
+                else:
+                    # don't modify path (even removing duplicates) if
+                    # found and not replace
+                    return
+            elif item == bdir and self.precedence == EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                # UNLESS it's already been added to sys.path and replace=False
+                if (not replace) and nloc in npath[p:]:
+                    return
+                if path is sys.path:
+                    self.check_version_conflict()
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            if path is sys.path:
+                self.check_version_conflict()
+            if replace:
+                path.insert(0, loc)
+            else:
+                path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while True:
+            try:
+                np = npath.index(nloc, p + 1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                # ha!
+                p = np
+
+        return
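+    # Illustrative sketch (hypothetical egg_dist, not part of the vendored
+    # source): with replace=False, an egg whose parent directory is already
+    # on the path is inserted just ahead of that parent.
+    #
+    #     path = ['/sp']               # '/sp' contains foo-1.0.egg
+    #     egg_dist.insert_on(path)     # path -> ['/sp/foo-1.0.egg', '/sp']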
+
+    def check_version_conflict(self):
+        if self.key == 'setuptools':
+            # ignore the inevitable setuptools self-conflicts  :(
+            return
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (
+                modname not in sys.modules
+                or modname in nsp
+                or modname in _namespace_packages
+            ):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (
+                normalize_path(fn).startswith(loc) or fn.startswith(self.location)
+            ):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for " + repr(self))
+            return False
+        except SystemError:
+            # TODO: remove this except clause when python/cpython#103632 is fixed.
+            return False
+        return True
+
+    def clone(self, **kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        names = 'project_name version py_version platform location precedence'
+        for attr in names.split():
+            kw.setdefault(attr, getattr(self, attr, None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+    @property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+
+
+class EggInfoDistribution(Distribution):
+    def _reload_version(self):
+        """
+        Packages installed by distutils (e.g. numpy or scipy) use an old
+        safe_version, so their version numbers can get mangled when
+        converted to filenames (e.g., 1.11.0.dev0+2329eae becomes
+        1.11.0.dev0_2329eae). Such distributions will not be parsed
+        properly downstream by Distribution and safe_version, so take an
+        extra step and try to get the version number from the metadata
+        file itself instead of the filename.
+        """
+        md_version = self._get_version()
+        if md_version:
+            self._version = md_version
+        return self
+
+
+class DistInfoDistribution(Distribution):
+    """
+    Wrap an actual or potential sys.path entry
+    w/metadata, .dist-info style.
+    """
+
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            metadata = self.get_metadata(self.PKG_INFO)
+            self._pkg_info = email.parser.Parser().parsestr(metadata)
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _compute_dependencies(self):
+        """Recompute this distribution's dependencies."""
+        dm = self.__dep_map = {None: []}
+
+        reqs = []
+        # Including any condition expressions
+        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+            reqs.extend(parse_requirements(req))
+
+        def reqs_for_extra(extra):
+            for req in reqs:
+                if not req.marker or req.marker.evaluate({'extra': extra}):
+                    yield req
+
+        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
+        dm[None].extend(common)
+
+        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+            s_extra = safe_extra(extra.strip())
+            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
+
+        return dm
+
+
+_distributionImpl = {
+    '.egg': Distribution,
+    '.egg-info': EggInfoDistribution,
+    '.dist-info': DistInfoDistribution,
+}
+
+
+def issue_warning(*args, **kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+def parse_requirements(strs):
+    """
+    Yield ``Requirement`` objects for each specification in `strs`.
+
+    `strs` must be a string, or a (possibly-nested) iterable thereof.
+    """
+    return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
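+
+
+# Illustrative sketch (hypothetical specifications, not part of the vendored
+# source): comments are dropped and continuation lines joined before each
+# specification becomes a Requirement.
+#
+#     list(parse_requirements("foo>=1.0  # pinned\nbar[extra]==2.1"))
+#     -> [Requirement('foo>=1.0'), Requirement('bar[extra]==2.1')]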
+
+
+class RequirementParseError(packaging.requirements.InvalidRequirement):
+    "Compatibility wrapper for InvalidRequirement"
+
+
+class Requirement(packaging.requirements.Requirement):
+    def __init__(self, requirement_string):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        super(Requirement, self).__init__(requirement_string)
+        self.unsafe_name = self.name
+        project_name = safe_name(self.name)
+        self.project_name, self.key = project_name, project_name.lower()
+        self.specs = [(spec.operator, spec.version) for spec in self.specifier]
+        self.extras = tuple(map(safe_extra, self.extras))
+        self.hashCmp = (
+            self.key,
+            self.url,
+            self.specifier,
+            frozenset(self.extras),
+            str(self.marker) if self.marker else None,
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __eq__(self, other):
+        return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __contains__(self, item):
+        if isinstance(item, Distribution):
+            if item.key != self.key:
+                return False
+
+            item = item.version
+
+        # Allow prereleases always in order to match the previous behavior of
+        # this method. In the future this should be smarter and follow PEP 440
+        # more accurately.
+        return self.specifier.contains(item, prereleases=True)
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self):
+        return "Requirement.parse(%r)" % str(self)
+
+    @staticmethod
+    def parse(s):
+        (req,) = parse_requirements(s)
+        return req
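+    # Illustrative sketch (hypothetical project name, not part of the
+    # vendored source): __contains__ accepts version strings as well as
+    # Distributions.
+    #
+    #     req = Requirement.parse("mypkg>=1.0,<2")
+    #     "1.5" in req   -> True   (prereleases are always allowed)
+    #     "2.0" in req   -> False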
+
+
+def _always_object(classes):
+    """
+    Ensure object appears in the mro even
+    for old-style classes.
+    """
+    if object not in classes:
+        return classes + (object,)
+    return classes
+
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
+    for t in types:
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    os.makedirs(dirname, exist_ok=True)
+
+
+def _bypass_ensure_directory(path):
+    """Sandbox-bypassing version of ensure_directory()"""
+    if not WRITE_SUPPORT:
+        raise IOError('"os.mkdir" not supported on this platform.')
+    dirname, filename = split(path)
+    if dirname and filename and not isdir(dirname):
+        _bypass_ensure_directory(dirname)
+        try:
+            mkdir(dirname, 0o755)
+        except FileExistsError:
+            pass
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
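+
+
+# Illustrative sketch (not part of the vendored source): lines before the
+# first header land in a None section.
+#
+#     list(split_sections("alpha\n[sec]\nbeta"))
+#     -> [(None, ['alpha']), ('sec', ['beta'])]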
+
+
+def _mkstemp(*args, **kw):
+    old_open = os.open
+    try:
+        # temporarily bypass sandboxing
+        os.open = os_open
+        return tempfile.mkstemp(*args, **kw)
+    finally:
+        # and then put it back
+        os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+    f(*args, **kwargs)
+    return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+    "Set up global resource manager (deliberately not state-saved)"
+    manager = ResourceManager()
+    g['_manager'] = manager
+    g.update(
+        (name, getattr(manager, name))
+        for name in dir(manager)
+        if not name.startswith('_')
+    )
+
+
+class PkgResourcesDeprecationWarning(Warning):
+    """
+    Base class for warning about deprecations in ``pkg_resources``
+
+    This class is not derived from ``DeprecationWarning``, and as such is
+    visible by default.
+    """
+
+
+@_call_aside
+def _initialize_master_working_set():
+    """
+    Prepare the master working set and make the ``require()``
+    API available.
+
+    This function has explicit effects on the global state
+    of pkg_resources. It is intended to be invoked once at
+    the initialization of this module.
+
+    Invocation by other packages is unsupported and done
+    at their own risk.
+    """
+    working_set = WorkingSet._build_master()
+    _declare_state('object', working_set=working_set)
+
+    require = working_set.require
+    iter_entry_points = working_set.iter_entry_points
+    add_activation_listener = working_set.subscribe
+    run_script = working_set.run_script
+    # backward compatibility
+    run_main = run_script
+    # Activate all distributions already on sys.path with replace=False and
+    # ensure that all distributions added to the working set in the future
+    # (e.g. by calling ``require()``) will get activated as well,
+    # with higher priority (replace=True).
+    tuple(dist.activate(replace=False) for dist in working_set)
+    add_activation_listener(
+        lambda dist: dist.activate(replace=True),
+        existing=False,
+    )
+    working_set.entries = []
+    # match order
+    list(map(working_set.add_entry, sys.path))
+    globals().update(locals())
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..6f32972
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
new file mode 100644
index 0000000..5ebf595
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
@@ -0,0 +1,566 @@
+"""
+Utilities for determining application-specific dirs. See
+<https://github.com/platformdirs/platformdirs> for details and usage.
+"""
+from __future__ import annotations
+
+import os
+import sys
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+from .version import __version__
+from .version import __version_tuple__ as __version_info__
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
+        from typing import Literal
+    else:  # pragma: no cover (py38-)
+        from pip._vendor.typing_extensions import Literal
+
+
+def _set_platform_dir_class() -> type[PlatformDirsABC]:
+    if sys.platform == "win32":
+        from pip._vendor.platformdirs.windows import Windows as Result
+    elif sys.platform == "darwin":
+        from pip._vendor.platformdirs.macos import MacOS as Result
+    else:
+        from pip._vendor.platformdirs.unix import Unix as Result
+
+    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
+        if os.getenv("SHELL") or os.getenv("PREFIX"):
+            return Result
+
+        from pip._vendor.platformdirs.android import _android_folder
+
+        if _android_folder() is not None:
+            from pip._vendor.platformdirs.android import Android
+
+            return Android  # return to avoid redefinition of result
+
+    return Result
+
+
+PlatformDirs = _set_platform_dir_class()  #: Currently active platform
+AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
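+
+# Illustrative usage sketch (not part of the vendored source; values are
+# platform-dependent, shown here for a Linux user):
+#
+#     dirs = PlatformDirs("MyApp", "MyCompany", version="1.0")
+#     dirs.user_data_dir   -> '~/.local/share/MyApp/1.0'
+#     dirs.user_cache_dir  -> '~/.cache/MyApp/1.0'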
+
+
+def user_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: data directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_dir
+
+
+def site_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: data directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_dir
+
+
+def user_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: config directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_dir
+
+
+def site_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: config directory shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_dir
+
+
+def user_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: cache directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_dir
+
+
+def site_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: cache directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_dir
+
+
+def user_state_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: state directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_dir
+
+
+def user_log_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: log directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_dir
+
+
+def user_documents_dir() -> str:
+    """:returns: documents directory tied to the user"""
+    return PlatformDirs().user_documents_dir
+
+
+def user_downloads_dir() -> str:
+    """:returns: downloads directory tied to the user"""
+    return PlatformDirs().user_downloads_dir
+
+
+def user_pictures_dir() -> str:
+    """:returns: pictures directory tied to the user"""
+    return PlatformDirs().user_pictures_dir
+
+
+def user_videos_dir() -> str:
+    """:returns: videos directory tied to the user"""
+    return PlatformDirs().user_videos_dir
+
+
+def user_music_dir() -> str:
+    """:returns: music directory tied to the user"""
+    return PlatformDirs().user_music_dir
+
+
+def user_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: runtime directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_dir
+
+
+def user_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: data path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_path
+
+
+def site_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: data path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_path
+
+
+def user_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: config path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_path
+
+
+def site_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param multipath: See `multipath`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: config path shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_path
+
+
+def site_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: cache path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_path
+
+
+def user_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: cache path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_path
+
+
+def user_state_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param roaming: See `roaming`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: state path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_path
+
+
+def user_log_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: log path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_path
+
+
+def user_documents_path() -> Path:
+    """:returns: documents path tied to the user"""
+    return PlatformDirs().user_documents_path
+
+
+def user_downloads_path() -> Path:
+    """:returns: downloads path tied to the user"""
+    return PlatformDirs().user_downloads_path
+
+
+def user_pictures_path() -> Path:
+    """:returns: pictures path tied to the user"""
+    return PlatformDirs().user_pictures_path
+
+
+def user_videos_path() -> Path:
+    """:returns: videos path tied to the user"""
+    return PlatformDirs().user_videos_path
+
+
+def user_music_path() -> Path:
+    """:returns: music path tied to the user"""
+    return PlatformDirs().user_music_path
+
+
+def user_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname`.
+    :param appauthor: See `appauthor`.
+    :param version: See `version`.
+    :param opinion: See `opinion`.
+    :param ensure_exists: See `ensure_exists`.
+    :returns: runtime path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_path
+
+
+__all__ = [
+    "__version__",
+    "__version_info__",
+    "PlatformDirs",
+    "AppDirs",
+    "PlatformDirsABC",
+    "user_data_dir",
+    "user_config_dir",
+    "user_cache_dir",
+    "user_state_dir",
+    "user_log_dir",
+    "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
+    "user_runtime_dir",
+    "site_data_dir",
+    "site_config_dir",
+    "site_cache_dir",
+    "user_data_path",
+    "user_config_path",
+    "user_cache_path",
+    "user_state_path",
+    "user_log_path",
+    "user_documents_path",
+    "user_downloads_path",
+    "user_pictures_path",
+    "user_videos_path",
+    "user_music_path",
+    "user_runtime_path",
+    "site_data_path",
+    "site_config_path",
+    "site_cache_path",
+]
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
new file mode 100644
index 0000000..6a0d6dd
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
@@ -0,0 +1,53 @@
+"""Main entry point."""
+from __future__ import annotations
+
+from pip._vendor.platformdirs import PlatformDirs, __version__
+
+PROPS = (
+    "user_data_dir",
+    "user_config_dir",
+    "user_cache_dir",
+    "user_state_dir",
+    "user_log_dir",
+    "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
+    "user_runtime_dir",
+    "site_data_dir",
+    "site_config_dir",
+    "site_cache_dir",
+)
+
+
+def main() -> None:
+    """Run main entry point."""
+    app_name = "MyApp"
+    app_author = "MyCompany"
+
+    print(f"-- platformdirs {__version__} --")  # noqa: T201
+
+    print("-- app dirs (with optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author, version="1.0")
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name, appauthor=False)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..431ca95
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 0000000..a1b3010
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc
new file mode 100644
index 0000000..9b53ebe
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc
new file mode 100644
index 0000000..c708216
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc
new file mode 100644
index 0000000..45e0bda
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc
new file mode 100644
index 0000000..b1b545c
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc
new file mode 100644
index 0000000..8930475
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc
new file mode 100644
index 0000000..2cd1ff3
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
new file mode 100644
index 0000000..76527dd
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
@@ -0,0 +1,210 @@
+"""Android."""
+from __future__ import annotations
+
+import os
+import re
+import sys
+from functools import lru_cache
+from typing import cast
+
+from .api import PlatformDirsABC
+
+
+class Android(PlatformDirsABC):
+    """
+    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
+    `appname`, `version`, and `ensure_exists` parameters.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``/data/user///files/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "files")
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. \
+        ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
+        """
+        return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `user_config_dir`"""
+        return self.user_config_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, same as `user_cache_dir`"""
+        return self.user_cache_dir
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """
+        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
+        return _android_documents_folder()
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
+        return _android_downloads_folder()
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
+        return _android_pictures_folder()
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
+        return _android_videos_folder()
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
+        return _android_music_folder()
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "tmp")  # noqa: PTH118
+        return path
+
+
+@lru_cache(maxsize=1)
+def _android_folder() -> str | None:
+    """:return: base folder for the Android OS or None if it cannot be found"""
+    try:
+        # First try to get path to android app via pyjnius
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        result: str | None = context.getFilesDir().getParentFile().getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        # if fails find an android folder looking path on the sys.path
+        pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
+        for path in sys.path:
+            if pattern.match(path):
+                result = path.split("/files")[0]
+                break
+        else:
+            result = None
+    return result
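+
+
+# Illustrative sketch (hypothetical package name, not part of the vendored
+# source): when pyjnius is unavailable, a sys.path entry such as
+# '/data/user/0/com.example.app/files' makes _android_folder() return
+# '/data/user/0/com.example.app'.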
+
+
+@lru_cache(maxsize=1)
+def _android_documents_folder() -> str:
+    """:return: documents folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        documents_dir = "/storage/emulated/0/Documents"
+
+    return documents_dir
+
+
+@lru_cache(maxsize=1)
+def _android_downloads_folder() -> str:
+    """:return: downloads folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        downloads_dir = "/storage/emulated/0/Downloads"
+
+    return downloads_dir
+
+
+@lru_cache(maxsize=1)
+def _android_pictures_folder() -> str:
+    """:return: pictures folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        pictures_dir = "/storage/emulated/0/Pictures"
+
+    return pictures_dir
+
+
+@lru_cache(maxsize=1)
+def _android_videos_folder() -> str:
+    """:return: videos folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        videos_dir = "/storage/emulated/0/DCIM/Camera"
+
+    return videos_dir
+
+
+@lru_cache(maxsize=1)
+def _android_music_folder() -> str:
+    """:return: music folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        music_dir = "/storage/emulated/0/Music"
+
+    return music_dir
+
+
+__all__ = [
+    "Android",
+]
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
new file mode 100644
index 0000000..d64ebb9
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
@@ -0,0 +1,223 @@
+"""Base API."""
+from __future__ import annotations
+
+import os
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    import sys
+
+    if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
+        from typing import Literal
+    else:  # pragma: no cover (py38-)
+        from pip._vendor.typing_extensions import Literal
+
+
+class PlatformDirsABC(ABC):
+    """Abstract base class for platform directories."""
+
+    def __init__(  # noqa: PLR0913
+        self,
+        appname: str | None = None,
+        appauthor: str | None | Literal[False] = None,
+        version: str | None = None,
+        roaming: bool = False,  # noqa: FBT001, FBT002
+        multipath: bool = False,  # noqa: FBT001, FBT002
+        opinion: bool = True,  # noqa: FBT001, FBT002
+        ensure_exists: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """
+        Create a new platform directory.
+
+        :param appname: See `appname`.
+        :param appauthor: See `appauthor`.
+        :param version: See `version`.
+        :param roaming: See `roaming`.
+        :param multipath: See `multipath`.
+        :param opinion: See `opinion`.
+        :param ensure_exists: See `ensure_exists`.
+        """
+        self.appname = appname  #: The name of the application.
+        self.appauthor = appauthor
+        """
+        The name of the app author or distributing body for this application. Typically, it is the owning company name.
+        Defaults to `appname`. You may pass ``False`` to disable it.
+        """
+        self.version = version
+        """
+        An optional version path element to append to the path. You might want to use this if you want multiple
+        versions of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
+        """
+        self.roaming = roaming
+        """
+        Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
+        for roaming profiles, this user data will be synced on login (see
+        `here <https://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
+        """
+        self.multipath = multipath
+        """
+        An optional parameter only applicable to Unix/Linux, indicating that the entire list of data dirs should be
+        returned. By default, only the first item is returned.
+        """
+        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
+        self.ensure_exists = ensure_exists
+        """
+        Optionally create the directory (and any missing parents) upon access if it does not exist.
+        By default, no directories are created.
+        """
+
+    def _append_app_name_and_version(self, *base: str) -> str:
+        params = list(base[1:])
+        if self.appname:
+            params.append(self.appname)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(base[0], *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    def _optionally_create_directory(self, path: str) -> None:
+        if self.ensure_exists:
+            Path(path).mkdir(parents=True, exist_ok=True)
+
+    @property
+    @abstractmethod
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users"""
+
+    @property
+    @abstractmethod
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user"""
+
+    @property
+    def user_data_path(self) -> Path:
+        """:return: data path tied to the user"""
+        return Path(self.user_data_dir)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users"""
+        return Path(self.site_data_dir)
+
+    @property
+    def user_config_path(self) -> Path:
+        """:return: config path tied to the user"""
+        return Path(self.user_config_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users"""
+        return Path(self.site_config_dir)
+
+    @property
+    def user_cache_path(self) -> Path:
+        """:return: cache path tied to the user"""
+        return Path(self.user_cache_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users"""
+        return Path(self.site_cache_dir)
+
+    @property
+    def user_state_path(self) -> Path:
+        """:return: state path tied to the user"""
+        return Path(self.user_state_dir)
+
+    @property
+    def user_log_path(self) -> Path:
+        """:return: log path tied to the user"""
+        return Path(self.user_log_dir)
+
+    @property
+    def user_documents_path(self) -> Path:
+        """:return: documents path tied to the user"""
+        return Path(self.user_documents_dir)
+
+    @property
+    def user_downloads_path(self) -> Path:
+        """:return: downloads path tied to the user"""
+        return Path(self.user_downloads_dir)
+
+    @property
+    def user_pictures_path(self) -> Path:
+        """:return: pictures path tied to the user"""
+        return Path(self.user_pictures_dir)
+
+    @property
+    def user_videos_path(self) -> Path:
+        """:return: videos path tied to the user"""
+        return Path(self.user_videos_dir)
+
+    @property
+    def user_music_path(self) -> Path:
+        """:return: music path tied to the user"""
+        return Path(self.user_music_dir)
+
+    @property
+    def user_runtime_path(self) -> Path:
+        """:return: runtime path tied to the user"""
+        return Path(self.user_runtime_dir)
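
Note: _append_app_name_and_version is the workhorse shared by every concrete platform class that follows. Extra base components land first, appname is appended only if set, and version is appended only when appname is also set. A standalone sketch of that joining rule (the function below is an illustration, not the vendored API, and it omits the ensure_exists mkdir step):

from __future__ import annotations

import os


def append_app_name_and_version(base: str, *extra: str,
                                appname: str | None = None,
                                version: str | None = None) -> str:
    # Mirrors the rule above: version is only used when appname is set.
    params = list(extra)
    if appname:
        params.append(appname)
        if version:
            params.append(version)
    return os.path.join(base, *params)


assert append_app_name_and_version("/data") == "/data"
assert append_app_name_and_version("/data", version="1.2") == "/data"  # version alone is ignored
assert append_app_name_and_version("/data", appname="app") == "/data/app"
assert append_app_name_and_version("/data", appname="app", version="1.2") == "/data/app/1.2"
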
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
new file mode 100644
index 0000000..a753e2a
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
@@ -0,0 +1,91 @@
+"""macOS."""
+from __future__ import annotations
+
+import os.path
+
+from .api import PlatformDirsABC
+
+
+class MacOS(PlatformDirsABC):
+    """
+    Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
+    <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
+    `version <platformdirs.api.PlatformDirsABC.version>`,
+    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
+        return self._append_app_name_and_version("/Library/Application Support")
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``"""
+        return self._append_app_name_and_version("/Library/Caches")
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return os.path.expanduser("~/Documents")  # noqa: PTH111
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return os.path.expanduser("~/Downloads")  # noqa: PTH111
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return os.path.expanduser("~/Pictures")  # noqa: PTH111
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
+        return os.path.expanduser("~/Movies")  # noqa: PTH111
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return os.path.expanduser("~/Music")  # noqa: PTH111
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
+
+
+__all__ = [
+    "MacOS",
+]
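
Note: on macOS every opinionated directory is a fixed ~/Library/... or /Library/... root run through _append_app_name_and_version, while the media folders pass through untouched. A hedged usage sketch (the app name and version are invented; the commented output assumes a stock macOS home layout):

from pip._vendor.platformdirs.macos import MacOS

dirs = MacOS(appname="SuperApp", version="1.0")
print(dirs.user_data_dir)       # ~/Library/Application Support/SuperApp/1.0
print(dirs.user_cache_dir)      # ~/Library/Caches/SuperApp/1.0
print(dirs.user_log_dir)        # ~/Library/Logs/SuperApp/1.0
print(dirs.user_documents_dir)  # ~/Documents (no appname/version suffix)
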
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
new file mode 100644
index 0000000..468b0ab
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
@@ -0,0 +1,223 @@
+"""Unix."""
+from __future__ import annotations
+
+import os
+import sys
+from configparser import ConfigParser
+from pathlib import Path
+
+from .api import PlatformDirsABC
+
+if sys.platform == "win32":
+
+    def getuid() -> int:
+        msg = "should only be used on Unix"
+        raise RuntimeError(msg)
+
+else:
+    from os import getuid
+
+
+class Unix(PlatformDirsABC):
+    """
+    On Unix/Linux, we follow the
+    `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
+    overriding directories with environment variables. The examples shown are the default values, alongside the name of
+    the environment variable that overrides them. Makes use of the
+    `appname <platformdirs.api.PlatformDirsABC.appname>`,
+    `version <platformdirs.api.PlatformDirsABC.version>`,
+    `multipath <platformdirs.api.PlatformDirsABC.multipath>`,
+    `opinion <platformdirs.api.PlatformDirsABC.opinion>`,
+    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
+         ``$XDG_DATA_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_DATA_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_data_dir(self) -> str:
+        """
+        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
+         enabled and ``XDG_DATA_DIRS`` is set to a multi path, the response is also a multi path separated by the OS
+         path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
+        """
+        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
+        path = os.environ.get("XDG_DATA_DIRS", "")
+        if not path.strip():
+            path = f"/usr/local/share{os.pathsep}/usr/share"
+        return self._with_multi_path(path)
+
+    def _with_multi_path(self, path: str) -> str:
+        path_list = path.split(os.pathsep)
+        if not self.multipath:
+            path_list = path_list[0:1]
+        path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]  # noqa: PTH111
+        return os.pathsep.join(path_list)
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
+         ``$XDG_CONFIG_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CONFIG_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.config")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_config_dir(self) -> str:
+        """
+        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
+         is enabled and ``XDG_CONFIG_DIRS`` is set to a multi path, the response is also a multi path separated by
+         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
+        """
+        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
+        path = os.environ.get("XDG_CONFIG_DIRS", "")
+        if not path.strip():
+            path = "/etc/xdg"
+        return self._with_multi_path(path)
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
+         ``$XDG_CACHE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CACHE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.cache")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``"""
+        return self._append_app_name_and_version("/var/tmp")  # noqa: S108
+
+    @property
+    def user_state_dir(self) -> str:
+        """
+        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
+         ``$XDG_STATE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_STATE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
+        path = self.user_state_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
+        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
+         ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+         If ``$XDG_RUNTIME_DIR`` is not set, FreeBSD/OpenBSD/NetBSD fall back to
+         ``/var/run/user/$(id -u)/$appname/$version`` if that directory exists, and otherwise to
+         ``/tmp/runtime-$(id -u)/$appname/$version``.
+        """
+        path = os.environ.get("XDG_RUNTIME_DIR", "")
+        if not path.strip():
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = f"/var/run/user/{getuid()}"
+                if not Path(path).exists():
+                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
+            else:
+                path = f"/run/user/{getuid()}"
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_data_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_config_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_cache_dir)
+
+    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
+        if self.multipath:
+            # If multipath is True, the first path is returned.
+            directory = directory.split(os.pathsep)[0]
+        return Path(directory)
+
+
+def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
+    media_dir = _get_user_dirs_folder(env_var)
+    if media_dir is None:
+        media_dir = os.environ.get(env_var, "").strip()
+        if not media_dir:
+            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
+
+    return media_dir
+
+
+def _get_user_dirs_folder(key: str) -> str | None:
+    """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/."""
+    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
+    if user_dirs_config_path.exists():
+        parser = ConfigParser()
+
+        with user_dirs_config_path.open() as stream:
+            # Add fake section header, so ConfigParser doesn't complain
+            parser.read_string(f"[top]\n{stream.read()}")
+
+        if key not in parser["top"]:
+            return None
+
+        path = parser["top"][key].strip('"')
+        # Handle relative home paths
+        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
+
+    return None
+
+
+__all__ = [
+    "Unix",
+]
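
Note: _get_user_dirs_folder above leans on a small trick worth calling out. ~/.config/user-dirs.dirs is a file of KEY="value" shell assignments with no INI section header, so the code prepends a dummy [top] section before handing it to ConfigParser. A self-contained sketch of the same parse (the sample file content is invented):

import os
from configparser import ConfigParser

sample = 'XDG_MUSIC_DIR="$HOME/Tunes"\nXDG_PICTURES_DIR="$HOME/Pictures"\n'

parser = ConfigParser()
parser.read_string(f"[top]\n{sample}")  # fake section header keeps ConfigParser happy

path = parser["top"]["XDG_MUSIC_DIR"].strip('"')  # key lookups are case-insensitive
print(path.replace("$HOME", os.path.expanduser("~")))  # e.g. /home/user/Tunes
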
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
new file mode 100644
index 0000000..dc8c44c
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
@@ -0,0 +1,4 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '3.8.1'
+__version_tuple__ = version_tuple = (3, 8, 1)
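
Note: the generated module keeps a parsed tuple alongside the version string, so callers can compare versions numerically rather than parsing __version__ themselves; a one-line sketch:

from pip._vendor.platformdirs.version import __version_tuple__

assert __version_tuple__ >= (3, 0, 0)  # tuple comparison, no string parsing needed
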
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
new file mode 100644
index 0000000..b52c9c6
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
@@ -0,0 +1,255 @@
+"""Windows."""
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+from functools import lru_cache
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+
+class Windows(PlatformDirsABC):
+    """
+    `MSDN on where to store app data files
+    <https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid>`_.
+    Makes use of the
+    `appname <platformdirs.api.PlatformDirsABC.appname>`,
+    `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
+    `version <platformdirs.api.PlatformDirsABC.version>`,
+    `roaming <platformdirs.api.PlatformDirsABC.roaming>`,
+    `opinion <platformdirs.api.PlatformDirsABC.opinion>`,
+    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
+         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
+        """
+        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
+        path = os.path.normpath(get_win_folder(const))
+        return self._append_parts(path)
+
+    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
+        params = []
+        if self.appname:
+            if self.appauthor is not False:
+                author = self.appauthor or self.appname
+                params.append(author)
+            params.append(self.appname)
+            if opinion_value is not None and self.opinion:
+                params.append(opinion_value)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(path, *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path)
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
+        """
+        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
+        path = self.user_data_dir
+        if self.opinion:
+            path = os.path.join(path, "Logs")  # noqa: PTH118
+            self._optionally_create_directory(path)
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
+        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
+        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
+        """
+        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
+        return self._append_parts(path)
+
+
+def get_win_folder_from_env_vars(csidl_name: str) -> str:
+    """Get folder from environment variables."""
+    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
+    if result is not None:
+        return result
+
+    env_var_name = {
+        "CSIDL_APPDATA": "APPDATA",
+        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
+        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
+    }.get(csidl_name)
+    if env_var_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    result = os.environ.get(env_var_name)
+    if result is None:
+        msg = f"Unset environment variable: {env_var_name}"
+        raise ValueError(msg)
+    return result
+
+
+def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
+    """Get folder for a CSIDL name that does not exist as an environment variable."""
+    if csidl_name == "CSIDL_PERSONAL":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYPICTURES":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYVIDEO":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYMUSIC":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
+    return None
+
+
+def get_win_folder_from_registry(csidl_name: str) -> str:
+    """
+    Get folder from the registry.
+
+    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
+    for all CSIDL_* names.
+    """
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+        "CSIDL_PERSONAL": "Personal",
+        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
+        "CSIDL_MYPICTURES": "My Pictures",
+        "CSIDL_MYVIDEO": "My Video",
+        "CSIDL_MYMUSIC": "My Music",
+    }.get(csidl_name)
+    if shell_folder_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
+        raise NotImplementedError
+    import winreg
+
+    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
+    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
+    return str(directory)
+
+
+def get_win_folder_via_ctypes(csidl_name: str) -> str:
+    """Get folder with ctypes."""
+    # There is no 'CSIDL_DOWNLOADS'.
+    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
+    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+        "CSIDL_PERSONAL": 5,
+        "CSIDL_MYPICTURES": 39,
+        "CSIDL_MYVIDEO": 14,
+        "CSIDL_MYMUSIC": 13,
+        "CSIDL_DOWNLOADS": 40,
+    }.get(csidl_name)
+    if csidl_const is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+
+    buf = ctypes.create_unicode_buffer(1024)
+    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
+    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if it has highbit chars.
+    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
+
+    return buf.value
+
+
+def _pick_get_win_folder() -> Callable[[str], str]:
+    if hasattr(ctypes, "windll"):
+        return get_win_folder_via_ctypes
+    try:
+        import winreg  # noqa: F401
+    except ImportError:
+        return get_win_folder_from_env_vars
+    else:
+        return get_win_folder_from_registry
+
+
+get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
+
+__all__ = [
+    "Windows",
+]
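
Note: get_win_folder is assembled in two stages above. _pick_get_win_folder selects the best available backend once at import time (ctypes, then the registry, then environment variables), and lru_cache memoizes the result per CSIDL name, so the Win32 call or registry read happens at most once per folder. A platform-independent sketch of the pattern with stub backends (all names below are illustrative):

from functools import lru_cache
from typing import Callable


def _fast_backend(csidl_name: str) -> str:
    return f"(ctypes) {csidl_name}"


def _fallback_backend(csidl_name: str) -> str:
    return f"(env vars) {csidl_name}"


def _pick_backend() -> Callable[[str], str]:
    have_ctypes_windll = False  # stand-in for hasattr(ctypes, "windll")
    return _fast_backend if have_ctypes_windll else _fallback_backend


get_folder = lru_cache(maxsize=None)(_pick_backend())  # resolved once, cached per name
print(get_folder("CSIDL_APPDATA"))
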
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
new file mode 100644
index 0000000..39c84aa
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
@@ -0,0 +1,82 @@
+"""
+    Pygments
+    ~~~~~~~~
+
+    Pygments is a syntax highlighting package written in Python.
+
+    It is a generic syntax highlighter for general use in all kinds of software
+    such as forum systems, wikis or other applications that need to prettify
+    source code. Highlights are:
+
+    * a wide range of common languages and markup formats is supported
+    * special attention is paid to details, increasing quality by a fair amount
+    * support for new languages and formats is added easily
+    * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
+      formats that PIL supports, and ANSI sequences
+    * it is usable as a command-line tool and as a library
+    * ... and it highlights even Brainfuck!
+
+    The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
+
+    .. _Pygments master branch:
+       https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+from io import StringIO, BytesIO
+
+__version__ = '2.15.1'
+__docformat__ = 'restructuredtext'
+
+__all__ = ['lex', 'format', 'highlight']
+
+
+def lex(code, lexer):
+    """
+    Lex `code` with the `lexer` (must be a `Lexer` instance)
+    and return an iterable of tokens. Currently, this only calls
+    `lexer.get_tokens()`.
+    """
+    try:
+        return lexer.get_tokens(code)
+    except TypeError:
+        # Heuristic to catch a common mistake.
+        from pip._vendor.pygments.lexer import RegexLexer
+        if isinstance(lexer, type) and issubclass(lexer, RegexLexer):
+            raise TypeError('lex() argument must be a lexer instance, '
+                            'not a class')
+        raise
+
+
+def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
+    """
+    Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
+    (a `Formatter` instance).
+
+    If ``outfile`` is given and a valid file object (an object with a
+    ``write`` method), the result will be written to it, otherwise it
+    is returned as a string.
+    """
+    try:
+        if not outfile:
+            realoutfile = BytesIO() if getattr(formatter, 'encoding', None) else StringIO()
+            formatter.format(tokens, realoutfile)
+            return realoutfile.getvalue()
+        else:
+            formatter.format(tokens, outfile)
+    except TypeError:
+        # Heuristic to catch a common mistake.
+        from pip._vendor.pygments.formatter import Formatter
+        if isinstance(formatter, type) and issubclass(formatter, Formatter):
+            raise TypeError('format() argument must be a formatter instance, '
+                            'not a class')
+        raise
+
+
+def highlight(code, lexer, formatter, outfile=None):
+    """
+    This is the most high-level highlighting function. It combines `lex` and
+    `format` in one function.
+    """
+    return format(lex(code, lexer), formatter, outfile)
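
Note: highlight is just format(lex(...)), and the TypeError heuristics exist because passing a lexer or formatter class instead of an instance is a common mistake. A hedged usage sketch against pip's vendored copy (any Pygments lexer/formatter pair works the same way):

from pip._vendor.pygments import highlight, lex
from pip._vendor.pygments.lexers import PythonLexer
from pip._vendor.pygments.formatters import TerminalFormatter

code = "print('hello')\n"
print(highlight(code, PythonLexer(), TerminalFormatter()))  # ANSI-colored string
for token_type, value in lex(code, PythonLexer()):  # the underlying token stream
    print(token_type, repr(value))
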
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
new file mode 100644
index 0000000..2f7f8cb
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
@@ -0,0 +1,17 @@
+"""
+    pygments.__main__
+    ~~~~~~~~~~~~~~~~~
+
+    Main entry point for ``python -m pygments``.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import sys
+from pip._vendor.pygments.cmdline import main
+
+try:
+    sys.exit(main(sys.argv))
+except KeyboardInterrupt:
+    sys.exit(1)
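
Note: this makes `python -m pygments` behave like the pygmentize script: main receives the full argv list (program name first) and returns an exit status rather than raising, with Ctrl-C mapped to status 1. A hedged equivalent programmatic call:

from pip._vendor.pygments.cmdline import main

status = main(["pygmentize", "-V"])  # prints the version banner
print("exit status:", status)  # 0 on success
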
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..26be996
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 0000000..11ddd8c
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc
new file mode 100644
index 0000000..fd77479
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc
new file mode 100644
index 0000000..22f7e80
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc
new file mode 100644
index 0000000..1ae6bde
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc
new file mode 100644
index 0000000..caf06cf
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc
new file mode 100644
index 0000000..a25c102
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc
new file mode 100644
index 0000000..3dc6443
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc
new file mode 100644
index 0000000..215aa54
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc
new file mode 100644
index 0000000..84dc87f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc
new file mode 100644
index 0000000..984e5d8
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc
new file mode 100644
index 0000000..aa16ab6
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc
new file mode 100644
index 0000000..7ed7d18
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc
new file mode 100644
index 0000000..8ac90cb
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc
new file mode 100644
index 0000000..5510708
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc
new file mode 100644
index 0000000..2eeb604
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py
new file mode 100644
index 0000000..eec1775
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/cmdline.py
@@ -0,0 +1,668 @@
+"""
+    pygments.cmdline
+    ~~~~~~~~~~~~~~~~
+
+    Command line interface.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import os
+import sys
+import shutil
+import argparse
+from textwrap import dedent
+
+from pip._vendor.pygments import __version__, highlight
+from pip._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \
+    guess_decode, guess_decode_from_terminal, terminal_encoding, \
+    UnclosingTextIOWrapper
+from pip._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+    load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
+from pip._vendor.pygments.lexers.special import TextLexer
+from pip._vendor.pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
+from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \
+    load_formatter_from_file, get_formatter_for_filename, find_formatter_class
+from pip._vendor.pygments.formatters.terminal import TerminalFormatter
+from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
+from pip._vendor.pygments.filters import get_all_filters, find_filter_class
+from pip._vendor.pygments.styles import get_all_styles, get_style_by_name
+
+
+def _parse_options(o_strs):
+    opts = {}
+    if not o_strs:
+        return opts
+    for o_str in o_strs:
+        if not o_str.strip():
+            continue
+        o_args = o_str.split(',')
+        for o_arg in o_args:
+            o_arg = o_arg.strip()
+            try:
+                o_key, o_val = o_arg.split('=', 1)
+                o_key = o_key.strip()
+                o_val = o_val.strip()
+            except ValueError:
+                opts[o_arg] = True
+            else:
+                opts[o_key] = o_val
+    return opts
+
+
+def _parse_filters(f_strs):
+    filters = []
+    if not f_strs:
+        return filters
+    for f_str in f_strs:
+        if ':' in f_str:
+            fname, fopts = f_str.split(':', 1)
+            filters.append((fname, _parse_options([fopts])))
+        else:
+            filters.append((f_str, {}))
+    return filters
+
+
+def _print_help(what, name):
+    try:
+        if what == 'lexer':
+            cls = get_lexer_by_name(name)
+            print("Help on the %s lexer:" % cls.name)
+            print(dedent(cls.__doc__))
+        elif what == 'formatter':
+            cls = find_formatter_class(name)
+            print("Help on the %s formatter:" % cls.name)
+            print(dedent(cls.__doc__))
+        elif what == 'filter':
+            cls = find_filter_class(name)
+            print("Help on the %s filter:" % name)
+            print(dedent(cls.__doc__))
+        return 0
+    except (AttributeError, ValueError):
+        print("%s not found!" % what, file=sys.stderr)
+        return 1
+
+
+def _print_list(what):
+    if what == 'lexer':
+        print()
+        print("Lexers:")
+        print("~~~~~~~")
+
+        info = []
+        for fullname, names, exts, _ in get_all_lexers():
+            tup = (', '.join(names)+':', fullname,
+                   exts and '(filenames ' + ', '.join(exts) + ')' or '')
+            info.append(tup)
+        info.sort()
+        for i in info:
+            print(('* %s\n    %s %s') % i)
+
+    elif what == 'formatter':
+        print()
+        print("Formatters:")
+        print("~~~~~~~~~~~")
+
+        info = []
+        for cls in get_all_formatters():
+            doc = docstring_headline(cls)
+            tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
+                   '(filenames ' + ', '.join(cls.filenames) + ')' or '')
+            info.append(tup)
+        info.sort()
+        for i in info:
+            print(('* %s\n    %s %s') % i)
+
+    elif what == 'filter':
+        print()
+        print("Filters:")
+        print("~~~~~~~~")
+
+        for name in get_all_filters():
+            cls = find_filter_class(name)
+            print("* " + name + ':')
+            print("    %s" % docstring_headline(cls))
+
+    elif what == 'style':
+        print()
+        print("Styles:")
+        print("~~~~~~~")
+
+        for name in get_all_styles():
+            cls = get_style_by_name(name)
+            print("* " + name + ':')
+            print("    %s" % docstring_headline(cls))
+
+
+def _print_list_as_json(requested_items):
+    import json
+    result = {}
+    if 'lexer' in requested_items:
+        info = {}
+        for fullname, names, filenames, mimetypes in get_all_lexers():
+            info[fullname] = {
+                'aliases': names,
+                'filenames': filenames,
+                'mimetypes': mimetypes
+            }
+        result['lexers'] = info
+
+    if 'formatter' in requested_items:
+        info = {}
+        for cls in get_all_formatters():
+            doc = docstring_headline(cls)
+            info[cls.name] = {
+                'aliases': cls.aliases,
+                'filenames': cls.filenames,
+                'doc': doc
+            }
+        result['formatters'] = info
+
+    if 'filter' in requested_items:
+        info = {}
+        for name in get_all_filters():
+            cls = find_filter_class(name)
+            info[name] = {
+                'doc': docstring_headline(cls)
+            }
+        result['filters'] = info
+
+    if 'style' in requested_items:
+        info = {}
+        for name in get_all_styles():
+            cls = get_style_by_name(name)
+            info[name] = {
+                'doc': docstring_headline(cls)
+            }
+        result['styles'] = info
+
+    json.dump(result, sys.stdout)
+
+
+def main_inner(parser, argns):
+    if argns.help:
+        parser.print_help()
+        return 0
+
+    if argns.V:
+        print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus '
+              'Chajdas and contributors.' % __version__)
+        return 0
+
+    def is_only_option(opt):
+        return not any(v for (k, v) in vars(argns).items() if k != opt)
+
+    # handle ``pygmentize -L``
+    if argns.L is not None:
+        arg_set = set()
+        for k, v in vars(argns).items():
+            if v:
+                arg_set.add(k)
+
+        arg_set.discard('L')
+        arg_set.discard('json')
+
+        if arg_set:
+            parser.print_help(sys.stderr)
+            return 2
+
+        # print version
+        if not argns.json:
+            main(['', '-V'])
+        allowed_types = {'lexer', 'formatter', 'filter', 'style'}
+        largs = [arg.rstrip('s') for arg in argns.L]
+        if any(arg not in allowed_types for arg in largs):
+            parser.print_help(sys.stderr)
+            return 0
+        if not largs:
+            largs = allowed_types
+        if not argns.json:
+            for arg in largs:
+                _print_list(arg)
+        else:
+            _print_list_as_json(largs)
+        return 0
+
+    # handle ``pygmentize -H``
+    if argns.H:
+        if not is_only_option('H'):
+            parser.print_help(sys.stderr)
+            return 2
+        what, name = argns.H
+        if what not in ('lexer', 'formatter', 'filter'):
+            parser.print_help(sys.stderr)
+            return 2
+        return _print_help(what, name)
+
+    # parse -O options
+    parsed_opts = _parse_options(argns.O or [])
+
+    # parse -P options
+    for p_opt in argns.P or []:
+        try:
+            name, value = p_opt.split('=', 1)
+        except ValueError:
+            parsed_opts[p_opt] = True
+        else:
+            parsed_opts[name] = value
+
+    # encodings
+    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
+    # handle ``pygmentize -N``
+    if argns.N:
+        lexer = find_lexer_class_for_filename(argns.N)
+        if lexer is None:
+            lexer = TextLexer
+
+        print(lexer.aliases[0])
+        return 0
+
+    # handle ``pygmentize -C``
+    if argns.C:
+        inp = sys.stdin.buffer.read()
+        try:
+            lexer = guess_lexer(inp, inencoding=inencoding)
+        except ClassNotFound:
+            lexer = TextLexer
+
+        print(lexer.aliases[0])
+        return 0
+
+    # handle ``pygmentize -S``
+    S_opt = argns.S
+    a_opt = argns.a
+    if S_opt is not None:
+        f_opt = argns.f
+        if not f_opt:
+            parser.print_help(sys.stderr)
+            return 2
+        if argns.l or argns.INPUTFILE:
+            parser.print_help(sys.stderr)
+            return 2
+
+        try:
+            parsed_opts['style'] = S_opt
+            fmter = get_formatter_by_name(f_opt, **parsed_opts)
+        except ClassNotFound as err:
+            print(err, file=sys.stderr)
+            return 1
+
+        print(fmter.get_style_defs(a_opt or ''))
+        return 0
+
+    # if no -S is given, -a is not allowed
+    if argns.a is not None:
+        parser.print_help(sys.stderr)
+        return 2
+
+    # parse -F options
+    F_opts = _parse_filters(argns.F or [])
+
+    # -x: allow custom (eXternal) lexers and formatters
+    allow_custom_lexer_formatter = bool(argns.x)
+
+    # select lexer
+    lexer = None
+
+    # given by name?
+    lexername = argns.l
+    if lexername:
+        # custom lexer, located relative to user's cwd
+        if allow_custom_lexer_formatter and '.py' in lexername:
+            try:
+                filename = None
+                name = None
+                if ':' in lexername:
+                    filename, name = lexername.rsplit(':', 1)
+
+                    if '.py' in name:
+                        # This can happen on Windows: If the lexername is
+                        # C:\lexer.py -- return to normal load path in that case
+                        name = None
+
+                if filename and name:
+                    lexer = load_lexer_from_file(filename, name,
+                                                 **parsed_opts)
+                else:
+                    lexer = load_lexer_from_file(lexername, **parsed_opts)
+            except ClassNotFound as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        else:
+            try:
+                lexer = get_lexer_by_name(lexername, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    # read input code
+    code = None
+
+    if argns.INPUTFILE:
+        if argns.s:
+            print('Error: -s option not usable when input file specified',
+                  file=sys.stderr)
+            return 2
+
+        infn = argns.INPUTFILE
+        try:
+            with open(infn, 'rb') as infp:
+                code = infp.read()
+        except Exception as err:
+            print('Error: cannot read infile:', err, file=sys.stderr)
+            return 1
+        if not inencoding:
+            code, inencoding = guess_decode(code)
+
+        # do we have to guess the lexer?
+        if not lexer:
+            try:
+                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
+            except ClassNotFound as err:
+                if argns.g:
+                    try:
+                        lexer = guess_lexer(code, **parsed_opts)
+                    except ClassNotFound:
+                        lexer = TextLexer(**parsed_opts)
+                else:
+                    print('Error:', err, file=sys.stderr)
+                    return 1
+            except OptionError as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    elif not argns.s:  # treat stdin as full file (-s support is later)
+        # read code from terminal, always in binary mode since we want to
+        # decode ourselves and be tolerant with it
+        code = sys.stdin.buffer.read()  # use .buffer to get a binary stream
+        if not inencoding:
+            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+            # else the lexer will do the decoding
+        if not lexer:
+            try:
+                lexer = guess_lexer(code, **parsed_opts)
+            except ClassNotFound:
+                lexer = TextLexer(**parsed_opts)
+
+    else:  # -s option needs a lexer with -l
+        if not lexer:
+            print('Error: when using -s a lexer has to be selected with -l',
+                  file=sys.stderr)
+            return 2
+
+    # process filters
+    for fname, fopts in F_opts:
+        try:
+            lexer.add_filter(fname, **fopts)
+        except ClassNotFound as err:
+            print('Error:', err, file=sys.stderr)
+            return 1
+
+    # select formatter
+    outfn = argns.o
+    fmter = argns.f
+    if fmter:
+        # custom formatter, located relative to user's cwd
+        if allow_custom_lexer_formatter and '.py' in fmter:
+            try:
+                filename = None
+                name = None
+                if ':' in fmter:
+                    # Same logic as above for custom lexer
+                    filename, name = fmter.rsplit(':', 1)
+
+                    if '.py' in name:
+                        name = None
+
+                if filename and name:
+                    fmter = load_formatter_from_file(filename, name,
+                                                     **parsed_opts)
+                else:
+                    fmter = load_formatter_from_file(fmter, **parsed_opts)
+            except ClassNotFound as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        else:
+            try:
+                fmter = get_formatter_by_name(fmter, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+
+    if outfn:
+        if not fmter:
+            try:
+                fmter = get_formatter_for_filename(outfn, **parsed_opts)
+            except (OptionError, ClassNotFound) as err:
+                print('Error:', err, file=sys.stderr)
+                return 1
+        try:
+            outfile = open(outfn, 'wb')
+        except Exception as err:
+            print('Error: cannot open outfile:', err, file=sys.stderr)
+            return 1
+    else:
+        if not fmter:
+            if os.environ.get('COLORTERM','') in ('truecolor', '24bit'):
+                fmter = TerminalTrueColorFormatter(**parsed_opts)
+            elif '256' in os.environ.get('TERM', ''):
+                fmter = Terminal256Formatter(**parsed_opts)
+            else:
+                fmter = TerminalFormatter(**parsed_opts)
+        outfile = sys.stdout.buffer
+
+    # determine output encoding if not explicitly selected
+    if not outencoding:
+        if outfn:
+            # output file? use lexer encoding for now (can still be None)
+            fmter.encoding = inencoding
+        else:
+            # else use terminal encoding
+            fmter.encoding = terminal_encoding(sys.stdout)
+
+    # provide coloring under Windows, if possible
+    if not outfn and sys.platform in ('win32', 'cygwin') and \
+       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
+        # unfortunately colorama doesn't support binary streams on Py3
+        outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
+        fmter.encoding = None
+        try:
+            import pip._vendor.colorama.initialise as colorama_initialise
+        except ImportError:
+            pass
+        else:
+            outfile = colorama_initialise.wrap_stream(
+                outfile, convert=None, strip=None, autoreset=False, wrap=True)
+
+    # When using the LaTeX formatter and the option `escapeinside` is
+    # specified, we need a special lexer which collects escaped text
+    # before running the chosen language lexer.
+    escapeinside = parsed_opts.get('escapeinside', '')
+    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
+        left = escapeinside[0]
+        right = escapeinside[1]
+        lexer = LatexEmbeddedLexer(left, right, lexer)
+
+    # ... and do it!
+    if not argns.s:
+        # process whole input as per normal...
+        try:
+            highlight(code, lexer, fmter, outfile)
+        finally:
+            if outfn:
+                outfile.close()
+        return 0
+    else:
+        # line by line processing of stdin (eg: for 'tail -f')...
+        try:
+            while 1:
+                line = sys.stdin.buffer.readline()
+                if not line:
+                    break
+                if not inencoding:
+                    line = guess_decode_from_terminal(line, sys.stdin)[0]
+                highlight(line, lexer, fmter, outfile)
+                if hasattr(outfile, 'flush'):
+                    outfile.flush()
+            return 0
+        except KeyboardInterrupt:  # pragma: no cover
+            return 0
+        finally:
+            if outfn:
+                outfile.close()
+
+
+class HelpFormatter(argparse.HelpFormatter):
+    def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
+        if width is None:
+            try:
+                width = shutil.get_terminal_size().columns - 2
+            except Exception:
+                pass
+        argparse.HelpFormatter.__init__(self, prog, indent_increment,
+                                        max_help_position, width)
+
+
+def main(args=sys.argv):
+    """
+    Main command line entry point.
+    """
+    desc = "Highlight an input file and write the result to an output file."
+    parser = argparse.ArgumentParser(description=desc, add_help=False,
+                                     formatter_class=HelpFormatter)
+
+    operation = parser.add_argument_group('Main operation')
+    lexersel = operation.add_mutually_exclusive_group()
+    lexersel.add_argument(
+        '-l', metavar='LEXER',
+        help='Specify the lexer to use.  (Query names with -L.)  If not '
+        'given and -g is not present, the lexer is guessed from the filename.')
+    lexersel.add_argument(
+        '-g', action='store_true',
+        help='Guess the lexer from the file contents, or pass through '
+        'as plain text if nothing can be guessed.')
+    operation.add_argument(
+        '-F', metavar='FILTER[:options]', action='append',
+        help='Add a filter to the token stream.  (Query names with -L.) '
+        'Filter options are given after a colon if necessary.')
+    operation.add_argument(
+        '-f', metavar='FORMATTER',
+        help='Specify the formatter to use.  (Query names with -L.) '
+        'If not given, the formatter is guessed from the output filename, '
+        'and defaults to the terminal formatter if the output is to the '
+        'terminal or an unknown file extension.')
+    operation.add_argument(
+        '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
+        help='Give options to the lexer and formatter as a comma-separated '
+        'list of key-value pairs. '
+        'Example: `-O bg=light,python=cool`.')
+    operation.add_argument(
+        '-P', metavar='OPTION=value', action='append',
+        help='Give a single option to the lexer and formatter - with this '
+        'you can pass options whose value contains commas and equal signs. '
+        'Example: `-P "heading=Pygments, the Python highlighter"`.')
+    operation.add_argument(
+        '-o', metavar='OUTPUTFILE',
+        help='Where to write the output.  Defaults to standard output.')
+
+    operation.add_argument(
+        'INPUTFILE', nargs='?',
+        help='Where to read the input.  Defaults to standard input.')
+
+    flags = parser.add_argument_group('Operation flags')
+    flags.add_argument(
+        '-v', action='store_true',
+        help='Print a detailed traceback on unhandled exceptions, which '
+        'is useful for debugging and bug reports.')
+    flags.add_argument(
+        '-s', action='store_true',
+        help='Process lines one at a time until EOF, rather than waiting to '
+        'process the entire file.  This only works for stdin, only for lexers '
+        'with no line-spanning constructs, and is intended for streaming '
+        'input such as you get from `tail -f`. '
+        'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
+    flags.add_argument(
+        '-x', action='store_true',
+        help='Allow custom lexers and formatters to be loaded from a .py file '
+        'relative to the current working directory. For example, '
+        '`-l ./customlexer.py -x`. By default, this option expects a file '
+        'with a class named CustomLexer or CustomFormatter; you can also '
+        'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
+        'Users should be very careful not to use this option with untrusted '
+        'files, because it will import and run them.')
+    flags.add_argument('--json', help='Output as JSON. This can '
+        'only be used in conjunction with -L.',
+        default=False,
+        action='store_true')
+
+    special_modes_group = parser.add_argument_group(
+        'Special modes - do not do any highlighting')
+    special_modes = special_modes_group.add_mutually_exclusive_group()
+    special_modes.add_argument(
+        '-S', metavar='STYLE -f formatter',
+        help='Print style definitions for STYLE for a formatter '
+        'given with -f. The argument given by -a is formatter '
+        'dependent.')
+    special_modes.add_argument(
+        '-L', nargs='*', metavar='WHAT',
+        help='List lexers, formatters, styles or filters -- '
+        'give additional arguments for the thing(s) you want to list '
+        '(e.g. "styles"), or omit them to list everything.')
+    special_modes.add_argument(
+        '-N', metavar='FILENAME',
+        help='Guess and print out a lexer name based solely on the given '
+        'filename. Does not take input or highlight anything. If no specific '
+        'lexer can be determined, "text" is printed.')
+    special_modes.add_argument(
+        '-C', action='store_true',
+        help='Like -N, but print out a lexer name based solely on '
+        'a given content from standard input.')
+    special_modes.add_argument(
+        '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
+        help='Print detailed help for the object <name> of type <type>, '
+        'where <type> is one of "lexer", "formatter" or "filter".')
+    special_modes.add_argument(
+        '-V', action='store_true',
+        help='Print the package version.')
+    special_modes.add_argument(
+        '-h', '--help', action='store_true',
+        help='Print this help.')
+    special_modes_group.add_argument(
+        '-a', metavar='ARG',
+        help='Formatter-specific additional argument for the -S (print '
+        'style sheet) mode.')
+
+    argns = parser.parse_args(args[1:])
+
+    try:
+        return main_inner(parser, argns)
+    except BrokenPipeError:
+        # someone closed our stdout, e.g. by quitting a pager.
+        return 0
+    except Exception:
+        if argns.v:
+            print(file=sys.stderr)
+            print('*' * 65, file=sys.stderr)
+            print('An unhandled exception occurred while highlighting.',
+                  file=sys.stderr)
+            print('Please report the whole traceback to the issue tracker at',
+                  file=sys.stderr)
+            print('<https://github.com/pygments/pygments/issues>.',
+                  file=sys.stderr)
+            print('*' * 65, file=sys.stderr)
+            print(file=sys.stderr)
+            raise
+        import traceback
+        info = traceback.format_exception(*sys.exc_info())
+        msg = info[-1].strip()
+        if len(info) >= 3:
+            # extract relevant file and position info
+            msg += '\n   (f%s)' % info[-2].split('\n')[0].strip()[1:]
+        print(file=sys.stderr)
+        print('*** Error while highlighting:', file=sys.stderr)
+        print(msg, file=sys.stderr)
+        print('*** If this is a bug you want to report, please rerun with -v.',
+              file=sys.stderr)
+        return 1
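
The `-s` branch above is the streaming path: it calls `highlight()` once per stdin line and flushes after every write. A minimal standalone sketch of the same loop, assuming a regular `pygments` install (not pip's vendored copy) and using `sql` purely as an example lexer alias:

```python
import sys

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('sql')   # example alias; any lexer name works
formatter = TerminalFormatter()

# Mirror of the `pygmentize -s` loop: highlight each line as it arrives
# and flush, so `tail -f sql.log | python this_script.py` stays live.
for line in sys.stdin:
    sys.stdout.write(highlight(line, lexer, formatter))
    sys.stdout.flush()
```

As in the real `-s` mode, each line is highlighted independently, so lexers with line-spanning constructs (multi-line strings, block comments) may mis-color.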
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
new file mode 100644
index 0000000..deb4937
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
@@ -0,0 +1,70 @@
+"""
+    pygments.console
+    ~~~~~~~~~~~~~~~~
+
+    Format colored console output.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+esc = "\x1b["
+
+codes = {}
+codes[""] = ""
+codes["reset"] = esc + "39;49;00m"
+
+codes["bold"] = esc + "01m"
+codes["faint"] = esc + "02m"
+codes["standout"] = esc + "03m"
+codes["underline"] = esc + "04m"
+codes["blink"] = esc + "05m"
+codes["overline"] = esc + "06m"
+
+dark_colors = ["black", "red", "green", "yellow", "blue",
+               "magenta", "cyan", "gray"]
+light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
+                "brightmagenta", "brightcyan", "white"]
+
+x = 30
+for d, l in zip(dark_colors, light_colors):
+    codes[d] = esc + "%im" % x
+    codes[l] = esc + "%im" % (60 + x)
+    x += 1
+
+del d, l, x
+
+codes["white"] = codes["bold"]
+
+
+def reset_color():
+    return codes["reset"]
+
+
+def colorize(color_key, text):
+    return codes[color_key] + text + codes["reset"]
+
+
+def ansiformat(attr, text):
+    """
+    Format ``text`` with a color and/or some attributes::
+
+        color       normal color
+        *color*     bold color
+        _color_     underlined color
+        +color+     blinking color
+    """
+    result = []
+    if attr[:1] == attr[-1:] == '+':
+        result.append(codes['blink'])
+        attr = attr[1:-1]
+    if attr[:1] == attr[-1:] == '*':
+        result.append(codes['bold'])
+        attr = attr[1:-1]
+    if attr[:1] == attr[-1:] == '_':
+        result.append(codes['underline'])
+        attr = attr[1:-1]
+    result.append(codes[attr])
+    result.append(text)
+    result.append(codes['reset'])
+    return ''.join(result)
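
These escape-code helpers are the building blocks the terminal formatters use. A small usage sketch, assuming a regular `pygments` install (the vendored `pip._vendor.pygments.console` module behaves the same):

```python
from pygments.console import ansiformat, colorize

# colorize() wraps text in one color code plus the reset sequence.
print(colorize('red', 'error: something failed'))

# ansiformat() layers attributes per its docstring:
# *color* -> bold, _color_ -> underline, +color+ -> blink.
print(ansiformat('*blue*', 'bold blue heading'))
print(ansiformat('_green_', 'underlined green note'))
```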
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
new file mode 100644
index 0000000..dafa08d
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
@@ -0,0 +1,71 @@
+"""
+    pygments.filter
+    ~~~~~~~~~~~~~~~
+
+    Module that implements the default filter.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+
+def apply_filters(stream, filters, lexer=None):
+    """
+    Use this method to apply an iterable of filters to
+    a stream. If lexer is given it's forwarded to the
+    filter, otherwise the filter receives `None`.
+    """
+    def _apply(filter_, stream):
+        yield from filter_.filter(lexer, stream)
+    for filter_ in filters:
+        stream = _apply(filter_, stream)
+    return stream
+
+
+def simplefilter(f):
+    """
+    Decorator that converts a function into a filter::
+
+        @simplefilter
+        def lowercase(self, lexer, stream, options):
+            for ttype, value in stream:
+                yield ttype, value.lower()
+    """
+    return type(f.__name__, (FunctionFilter,), {
+        '__module__': getattr(f, '__module__'),
+        '__doc__': f.__doc__,
+        'function': f,
+    })
+
+
+class Filter:
+    """
+    Default filter. Subclass this class or use the `simplefilter`
+    decorator to create own filters.
+    """
+
+    def __init__(self, **options):
+        self.options = options
+
+    def filter(self, lexer, stream):
+        raise NotImplementedError()
+
+
+class FunctionFilter(Filter):
+    """
+    Abstract class used by `simplefilter` to create simple
+    function filters on the fly. The `simplefilter` decorator
+    automatically creates subclasses of this class for
+    functions passed to it.
+    """
+    function = None
+
+    def __init__(self, **options):
+        if not hasattr(self, 'function'):
+            raise TypeError('%r used without bound function' %
+                            self.__class__.__name__)
+        Filter.__init__(self, **options)
+
+    def filter(self, lexer, stream):
+        # pylint: disable=not-callable
+        yield from self.function(lexer, stream, self.options)
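
Putting the two pieces together: `simplefilter` manufactures a `FunctionFilter` subclass, and `apply_filters` chains instances of it over a token stream. A minimal sketch, assuming a regular `pygments` install; the `uppercase` filter here is a made-up example:

```python
from pygments.filter import apply_filters, simplefilter
from pygments.lexers import PythonLexer

@simplefilter
def uppercase(self, lexer, stream, options):
    # `self` is the generated FunctionFilter instance; `lexer` is None
    # here because apply_filters() is called without one.
    for ttype, value in stream:
        yield ttype, value.upper()

tokens = PythonLexer().get_tokens('print("hi")')
for ttype, value in apply_filters(tokens, [uppercase()]):
    print(ttype, repr(value))
```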
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
new file mode 100644
index 0000000..5aa9ecb
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
@@ -0,0 +1,940 @@
+"""
+    pygments.filters
+    ~~~~~~~~~~~~~~~~
+
+    Module containing filter lookup functions and default
+    filters.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pip._vendor.pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
+    string_to_tokentype
+from pip._vendor.pygments.filter import Filter
+from pip._vendor.pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
+    get_choice_opt, ClassNotFound, OptionError
+from pip._vendor.pygments.plugin import find_plugin_filters
+
+
+def find_filter_class(filtername):
+    """Lookup a filter by name. Return None if not found."""
+    if filtername in FILTERS:
+        return FILTERS[filtername]
+    for name, cls in find_plugin_filters():
+        if name == filtername:
+            return cls
+    return None
+
+
+def get_filter_by_name(filtername, **options):
+    """Return an instantiated filter.
+
+    Options are passed to the filter initializer if wanted.
+    Raise a ClassNotFound if not found.
+    """
+    cls = find_filter_class(filtername)
+    if cls:
+        return cls(**options)
+    else:
+        raise ClassNotFound('filter %r not found' % filtername)
+
+
+def get_all_filters():
+    """Return a generator of all filter names."""
+    yield from FILTERS
+    for name, _ in find_plugin_filters():
+        yield name
+
+
+def _replace_special(ttype, value, regex, specialttype,
+                     replacefunc=lambda x: x):
+    last = 0
+    for match in regex.finditer(value):
+        start, end = match.start(), match.end()
+        if start != last:
+            yield ttype, value[last:start]
+        yield specialttype, replacefunc(value[start:end])
+        last = end
+    if last != len(value):
+        yield ttype, value[last:]
+
+
+class CodeTagFilter(Filter):
+    """Highlight special code tags in comments and docstrings.
+
+    Options accepted:
+
+    `codetags` : list of strings
+       A list of strings that are flagged as code tags.  The default is to
+       highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``.
+
+    .. versionchanged:: 2.13
+       Now recognizes ``FIXME`` by default.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        tags = get_list_opt(options, 'codetags',
+                            ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
+        self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+            re.escape(tag) for tag in tags if tag
+        ]))
+
+    def filter(self, lexer, stream):
+        regex = self.tag_re
+        for ttype, value in stream:
+            if ttype in String.Doc or \
+               ttype in Comment and \
+               ttype not in Comment.Preproc:
+                yield from _replace_special(ttype, value, regex, Comment.Special)
+            else:
+                yield ttype, value
+
+
+class SymbolFilter(Filter):
+    """Convert mathematical symbols such as \\ in Isabelle
+    or \\longrightarrow in LaTeX into Unicode characters.
+
+    This is mostly useful for HTML or console output when you want to
+    approximate the source rendering you'd see in an IDE.
+
+    Options accepted:
+
+    `lang` : string
+       The symbol language. Must be one of ``'isabelle'`` or
+       ``'latex'``.  The default is ``'isabelle'``.
+    """
+
+    latex_symbols = {
+        '\\alpha'                : '\U000003b1',
+        '\\beta'                 : '\U000003b2',
+        '\\gamma'                : '\U000003b3',
+        '\\delta'                : '\U000003b4',
+        '\\varepsilon'           : '\U000003b5',
+        '\\zeta'                 : '\U000003b6',
+        '\\eta'                  : '\U000003b7',
+        '\\vartheta'             : '\U000003b8',
+        '\\iota'                 : '\U000003b9',
+        '\\kappa'                : '\U000003ba',
+        '\\lambda'               : '\U000003bb',
+        '\\mu'                   : '\U000003bc',
+        '\\nu'                   : '\U000003bd',
+        '\\xi'                   : '\U000003be',
+        '\\pi'                   : '\U000003c0',
+        '\\varrho'               : '\U000003c1',
+        '\\sigma'                : '\U000003c3',
+        '\\tau'                  : '\U000003c4',
+        '\\upsilon'              : '\U000003c5',
+        '\\varphi'               : '\U000003c6',
+        '\\chi'                  : '\U000003c7',
+        '\\psi'                  : '\U000003c8',
+        '\\omega'                : '\U000003c9',
+        '\\Gamma'                : '\U00000393',
+        '\\Delta'                : '\U00000394',
+        '\\Theta'                : '\U00000398',
+        '\\Lambda'               : '\U0000039b',
+        '\\Xi'                   : '\U0000039e',
+        '\\Pi'                   : '\U000003a0',
+        '\\Sigma'                : '\U000003a3',
+        '\\Upsilon'              : '\U000003a5',
+        '\\Phi'                  : '\U000003a6',
+        '\\Psi'                  : '\U000003a8',
+        '\\Omega'                : '\U000003a9',
+        '\\leftarrow'            : '\U00002190',
+        '\\longleftarrow'        : '\U000027f5',
+        '\\rightarrow'           : '\U00002192',
+        '\\longrightarrow'       : '\U000027f6',
+        '\\Leftarrow'            : '\U000021d0',
+        '\\Longleftarrow'        : '\U000027f8',
+        '\\Rightarrow'           : '\U000021d2',
+        '\\Longrightarrow'       : '\U000027f9',
+        '\\leftrightarrow'       : '\U00002194',
+        '\\longleftrightarrow'   : '\U000027f7',
+        '\\Leftrightarrow'       : '\U000021d4',
+        '\\Longleftrightarrow'   : '\U000027fa',
+        '\\mapsto'               : '\U000021a6',
+        '\\longmapsto'           : '\U000027fc',
+        '\\relbar'               : '\U00002500',
+        '\\Relbar'               : '\U00002550',
+        '\\hookleftarrow'        : '\U000021a9',
+        '\\hookrightarrow'       : '\U000021aa',
+        '\\leftharpoondown'      : '\U000021bd',
+        '\\rightharpoondown'     : '\U000021c1',
+        '\\leftharpoonup'        : '\U000021bc',
+        '\\rightharpoonup'       : '\U000021c0',
+        '\\rightleftharpoons'    : '\U000021cc',
+        '\\leadsto'              : '\U0000219d',
+        '\\downharpoonleft'      : '\U000021c3',
+        '\\downharpoonright'     : '\U000021c2',
+        '\\upharpoonleft'        : '\U000021bf',
+        '\\upharpoonright'       : '\U000021be',
+        '\\restriction'          : '\U000021be',
+        '\\uparrow'              : '\U00002191',
+        '\\Uparrow'              : '\U000021d1',
+        '\\downarrow'            : '\U00002193',
+        '\\Downarrow'            : '\U000021d3',
+        '\\updownarrow'          : '\U00002195',
+        '\\Updownarrow'          : '\U000021d5',
+        '\\langle'               : '\U000027e8',
+        '\\rangle'               : '\U000027e9',
+        '\\lceil'                : '\U00002308',
+        '\\rceil'                : '\U00002309',
+        '\\lfloor'               : '\U0000230a',
+        '\\rfloor'               : '\U0000230b',
+        '\\flqq'                 : '\U000000ab',
+        '\\frqq'                 : '\U000000bb',
+        '\\bot'                  : '\U000022a5',
+        '\\top'                  : '\U000022a4',
+        '\\wedge'                : '\U00002227',
+        '\\bigwedge'             : '\U000022c0',
+        '\\vee'                  : '\U00002228',
+        '\\bigvee'               : '\U000022c1',
+        '\\forall'               : '\U00002200',
+        '\\exists'               : '\U00002203',
+        '\\nexists'              : '\U00002204',
+        '\\neg'                  : '\U000000ac',
+        '\\Box'                  : '\U000025a1',
+        '\\Diamond'              : '\U000025c7',
+        '\\vdash'                : '\U000022a2',
+        '\\models'               : '\U000022a8',
+        '\\dashv'                : '\U000022a3',
+        '\\surd'                 : '\U0000221a',
+        '\\le'                   : '\U00002264',
+        '\\ge'                   : '\U00002265',
+        '\\ll'                   : '\U0000226a',
+        '\\gg'                   : '\U0000226b',
+        '\\lesssim'              : '\U00002272',
+        '\\gtrsim'               : '\U00002273',
+        '\\lessapprox'           : '\U00002a85',
+        '\\gtrapprox'            : '\U00002a86',
+        '\\in'                   : '\U00002208',
+        '\\notin'                : '\U00002209',
+        '\\subset'               : '\U00002282',
+        '\\supset'               : '\U00002283',
+        '\\subseteq'             : '\U00002286',
+        '\\supseteq'             : '\U00002287',
+        '\\sqsubset'             : '\U0000228f',
+        '\\sqsupset'             : '\U00002290',
+        '\\sqsubseteq'           : '\U00002291',
+        '\\sqsupseteq'           : '\U00002292',
+        '\\cap'                  : '\U00002229',
+        '\\bigcap'               : '\U000022c2',
+        '\\cup'                  : '\U0000222a',
+        '\\bigcup'               : '\U000022c3',
+        '\\sqcup'                : '\U00002294',
+        '\\bigsqcup'             : '\U00002a06',
+        '\\sqcap'                : '\U00002293',
+        '\\Bigsqcap'             : '\U00002a05',
+        '\\setminus'             : '\U00002216',
+        '\\propto'               : '\U0000221d',
+        '\\uplus'                : '\U0000228e',
+        '\\bigplus'              : '\U00002a04',
+        '\\sim'                  : '\U0000223c',
+        '\\doteq'                : '\U00002250',
+        '\\simeq'                : '\U00002243',
+        '\\approx'               : '\U00002248',
+        '\\asymp'                : '\U0000224d',
+        '\\cong'                 : '\U00002245',
+        '\\equiv'                : '\U00002261',
+        '\\Join'                 : '\U000022c8',
+        '\\bowtie'               : '\U00002a1d',
+        '\\prec'                 : '\U0000227a',
+        '\\succ'                 : '\U0000227b',
+        '\\preceq'               : '\U0000227c',
+        '\\succeq'               : '\U0000227d',
+        '\\parallel'             : '\U00002225',
+        '\\mid'                  : '\U000000a6',
+        '\\pm'                   : '\U000000b1',
+        '\\mp'                   : '\U00002213',
+        '\\times'                : '\U000000d7',
+        '\\div'                  : '\U000000f7',
+        '\\cdot'                 : '\U000022c5',
+        '\\star'                 : '\U000022c6',
+        '\\circ'                 : '\U00002218',
+        '\\dagger'               : '\U00002020',
+        '\\ddagger'              : '\U00002021',
+        '\\lhd'                  : '\U000022b2',
+        '\\rhd'                  : '\U000022b3',
+        '\\unlhd'                : '\U000022b4',
+        '\\unrhd'                : '\U000022b5',
+        '\\triangleleft'         : '\U000025c3',
+        '\\triangleright'        : '\U000025b9',
+        '\\triangle'             : '\U000025b3',
+        '\\triangleq'            : '\U0000225c',
+        '\\oplus'                : '\U00002295',
+        '\\bigoplus'             : '\U00002a01',
+        '\\otimes'               : '\U00002297',
+        '\\bigotimes'            : '\U00002a02',
+        '\\odot'                 : '\U00002299',
+        '\\bigodot'              : '\U00002a00',
+        '\\ominus'               : '\U00002296',
+        '\\oslash'               : '\U00002298',
+        '\\dots'                 : '\U00002026',
+        '\\cdots'                : '\U000022ef',
+        '\\sum'                  : '\U00002211',
+        '\\prod'                 : '\U0000220f',
+        '\\coprod'               : '\U00002210',
+        '\\infty'                : '\U0000221e',
+        '\\int'                  : '\U0000222b',
+        '\\oint'                 : '\U0000222e',
+        '\\clubsuit'             : '\U00002663',
+        '\\diamondsuit'          : '\U00002662',
+        '\\heartsuit'            : '\U00002661',
+        '\\spadesuit'            : '\U00002660',
+        '\\aleph'                : '\U00002135',
+        '\\emptyset'             : '\U00002205',
+        '\\nabla'                : '\U00002207',
+        '\\partial'              : '\U00002202',
+        '\\flat'                 : '\U0000266d',
+        '\\natural'              : '\U0000266e',
+        '\\sharp'                : '\U0000266f',
+        '\\angle'                : '\U00002220',
+        '\\copyright'            : '\U000000a9',
+        '\\textregistered'       : '\U000000ae',
+        '\\textonequarter'       : '\U000000bc',
+        '\\textonehalf'          : '\U000000bd',
+        '\\textthreequarters'    : '\U000000be',
+        '\\textordfeminine'      : '\U000000aa',
+        '\\textordmasculine'     : '\U000000ba',
+        '\\euro'                 : '\U000020ac',
+        '\\pounds'               : '\U000000a3',
+        '\\yen'                  : '\U000000a5',
+        '\\textcent'             : '\U000000a2',
+        '\\textcurrency'         : '\U000000a4',
+        '\\textdegree'           : '\U000000b0',
+    }
+
+    isabelle_symbols = {
+        '\\<zero>'              : '\U0001d7ec',
+        '\\<one>'               : '\U0001d7ed',
+        '\\<two>'               : '\U0001d7ee',
+        '\\<three>'             : '\U0001d7ef',
+        '\\<four>'              : '\U0001d7f0',
+        '\\<five>'              : '\U0001d7f1',
+        '\\<six>'               : '\U0001d7f2',
+        '\\<seven>'             : '\U0001d7f3',
+        '\\<eight>'             : '\U0001d7f4',
+        '\\<nine>'              : '\U0001d7f5',
+        '\\<A>'                 : '\U0001d49c',
+        '\\<B>'                 : '\U0000212c',
+        '\\<C>'                 : '\U0001d49e',
+        '\\<D>'                 : '\U0001d49f',
+        '\\<E>'                 : '\U00002130',
+        '\\<F>'                 : '\U00002131',
+        '\\<G>'                 : '\U0001d4a2',
+        '\\<H>'                 : '\U0000210b',
+        '\\<I>'                 : '\U00002110',
+        '\\<J>'                 : '\U0001d4a5',
+        '\\<K>'                 : '\U0001d4a6',
+        '\\<L>'                 : '\U00002112',
+        '\\<M>'                 : '\U00002133',
+        '\\<N>'                 : '\U0001d4a9',
+        '\\<O>'                 : '\U0001d4aa',
+        # [... several hundred further symbol entries elided: their Isabelle
+        # `\<name>` keys were stripped as markup in this capture, leaving
+        # only the Unicode replacement values ...]
+        '\\<^sub>'              : '\U000021e9',
+        '\\<^sup>'              : '\U000021e7',
+        '\\<^bold>'             : '\U00002759',
+        '\\<^bsub>'             : '\U000021d8',
+        '\\<^esub>'             : '\U000021d9',
+        '\\<^bsup>'             : '\U000021d7',
+        '\\<^esup>'             : '\U000021d6',
+    }
+
+    lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        lang = get_choice_opt(options, 'lang',
+                              ['isabelle', 'latex'], 'isabelle')
+        self.symbols = self.lang_map[lang]
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if value in self.symbols:
+                yield ttype, self.symbols[value]
+            else:
+                yield ttype, value
+
+
+class KeywordCaseFilter(Filter):
+    """Convert keywords to lowercase or uppercase or capitalize them, which
+    means first letter uppercase, rest lowercase.
+
+    This can be useful e.g. if you highlight Pascal code and want to adapt the
+    code to your styleguide.
+
+    Options accepted:
+
+    `case` : string
+       The casing to convert keywords to. Must be one of ``'lower'``,
+       ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        case = get_choice_opt(options, 'case',
+                              ['lower', 'upper', 'capitalize'], 'lower')
+        self.convert = getattr(str, case)
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype in Keyword:
+                yield ttype, self.convert(value)
+            else:
+                yield ttype, value
+
+
+class NameHighlightFilter(Filter):
+    """Highlight a normal Name (and Name.*) token with a different token type.
+
+    Example::
+
+        filter = NameHighlightFilter(
+            names=['foo', 'bar', 'baz'],
+            tokentype=Name.Function,
+        )
+
+    This would highlight the names "foo", "bar" and "baz"
+    as functions. `Name.Function` is the default token type.
+
+    Options accepted:
+
+    `names` : list of strings
+       A list of names that should be given the different token type.
+       There is no default.
+    `tokentype` : TokenType or string
+       A token type or a string containing a token type name that is
+       used for highlighting the strings in `names`. The default is
+       `Name.Function`.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.names = set(get_list_opt(options, 'names', []))
+        tokentype = options.get('tokentype')
+        if tokentype:
+            self.tokentype = string_to_tokentype(tokentype)
+        else:
+            self.tokentype = Name.Function
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype in Name and value in self.names:
+                yield self.tokentype, value
+            else:
+                yield ttype, value
+
+
+class ErrorToken(Exception):
+    pass
+
+
+class RaiseOnErrorTokenFilter(Filter):
+    """Raise an exception when the lexer generates an error token.
+
+    Options accepted:
+
+    `excclass` : Exception class
+       The exception class to raise.
+       The default is `pygments.filters.ErrorToken`.
+
+    .. versionadded:: 0.8
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.exception = options.get('excclass', ErrorToken)
+        try:
+            # issubclass() will raise TypeError if first argument is not a class
+            if not issubclass(self.exception, Exception):
+                raise TypeError
+        except TypeError:
+            raise OptionError('excclass option is not an exception class')
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype is Error:
+                raise self.exception(value)
+            yield ttype, value
+
+
+class VisibleWhitespaceFilter(Filter):
+    """Convert tabs, newlines and/or spaces to visible characters.
+
+    Options accepted:
+
+    `spaces` : string or bool
+       If this is a one-character string, spaces will be replaced by this
+       string. If it is another true value, spaces will be replaced by ``·``
+       (unicode MIDDLE DOT). If it is a false value, spaces will not be
+       replaced. The default is ``False``.
+    `tabs` : string or bool
+       The same as for `spaces`, but the default replacement character is ``»``
+       (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
+       is ``False``. Note: this will not work if the `tabsize` option for the
+       lexer is nonzero, as tabs will already have been expanded then.
+    `tabsize` : int
+       If tabs are to be replaced by this filter (see the `tabs` option), this
+       is the total number of characters that a tab should be expanded to.
+       The default is ``8``.
+    `newlines` : string or bool
+       The same as for `spaces`, but the default replacement character is ``¶``
+       (unicode PILCROW SIGN). The default value is ``False``.
+    `wstokentype` : bool
+       If true, give whitespace the special `Whitespace` token type. This allows
+       styling the visible whitespace differently (e.g. greyed out), but it can
+       disrupt background colors. The default is ``True``.
+
+    .. versionadded:: 0.8
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        for name, default in [('spaces',   '·'),
+                              ('tabs',     '»'),
+                              ('newlines', '¶')]:
+            opt = options.get(name, False)
+            if isinstance(opt, str) and len(opt) == 1:
+                setattr(self, name, opt)
+            else:
+                setattr(self, name, (opt and default or ''))
+        tabsize = get_int_opt(options, 'tabsize', 8)
+        if self.tabs:
+            self.tabs += ' ' * (tabsize - 1)
+        if self.newlines:
+            self.newlines += '\n'
+        self.wstt = get_bool_opt(options, 'wstokentype', True)
+
+    def filter(self, lexer, stream):
+        if self.wstt:
+            spaces = self.spaces or ' '
+            tabs = self.tabs or '\t'
+            newlines = self.newlines or '\n'
+            regex = re.compile(r'\s')
+
+            def replacefunc(wschar):
+                if wschar == ' ':
+                    return spaces
+                elif wschar == '\t':
+                    return tabs
+                elif wschar == '\n':
+                    return newlines
+                return wschar
+
+            for ttype, value in stream:
+                yield from _replace_special(ttype, value, regex, Whitespace,
+                                            replacefunc)
+        else:
+            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
+            # simpler processing
+            for ttype, value in stream:
+                if spaces:
+                    value = value.replace(' ', spaces)
+                if tabs:
+                    value = value.replace('\t', tabs)
+                if newlines:
+                    value = value.replace('\n', newlines)
+                yield ttype, value
+
+
+class GobbleFilter(Filter):
+    """Gobbles source code lines (eats initial characters).
+
+    This filter drops the first ``n`` characters off every line of code. This
+    may be useful when the source code fed to the lexer is indented by a fixed
+    amount of space that isn't desired in the output.
+
+    Options accepted:
+
+    `n` : int
+       The number of characters to gobble.
+
+    .. versionadded:: 1.2
+    """
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.n = get_int_opt(options, 'n', 0)
+
+    def gobble(self, value, left):
+        if left < len(value):
+            return value[left:], 0
+        else:
+            return '', left - len(value)
+
+    def filter(self, lexer, stream):
+        n = self.n
+        left = n  # How many characters left to gobble.
+        for ttype, value in stream:
+            # Remove ``left`` tokens from first line, ``n`` from all others.
+            parts = value.split('\n')
+            (parts[0], left) = self.gobble(parts[0], left)
+            for i in range(1, len(parts)):
+                (parts[i], left) = self.gobble(parts[i], n)
+            value = '\n'.join(parts)
+
+            if value != '':
+                yield ttype, value
+
+
+class TokenMergeFilter(Filter):
+    """Merges consecutive tokens with the same token type in the output
+    stream of a lexer.
+
+    .. versionadded:: 1.2
+    """
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+
+    def filter(self, lexer, stream):
+        current_type = None
+        current_value = None
+        for ttype, value in stream:
+            if ttype is current_type:
+                current_value += value
+            else:
+                if current_type is not None:
+                    yield current_type, current_value
+                current_type = ttype
+                current_value = value
+        if current_type is not None:
+            yield current_type, current_value
+
+
+FILTERS = {
+    'codetagify': CodeTagFilter,
+    'keywordcase': KeywordCaseFilter,
+    'highlight': NameHighlightFilter,
+    'raiseonerror': RaiseOnErrorTokenFilter,
+    'whitespace': VisibleWhitespaceFilter,
+    'gobble': GobbleFilter,
+    'tokenmerge': TokenMergeFilter,
+    'symbols': SymbolFilter,
+}
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..ad3b31f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py
new file mode 100644
index 0000000..3ca4892
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py
@@ -0,0 +1,124 @@
+"""
+    pygments.formatter
+    ~~~~~~~~~~~~~~~~~~
+
+    Base formatter class.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import codecs
+
+from pip._vendor.pygments.util import get_bool_opt
+from pip._vendor.pygments.styles import get_style_by_name
+
+__all__ = ['Formatter']
+
+
+def _lookup_style(style):
+    if isinstance(style, str):
+        return get_style_by_name(style)
+    return style
+
+
+class Formatter:
+    """
+    Converts a token stream to text.
+
+    Formatters should have attributes to help selecting them. These
+    are similar to the corresponding :class:`~pygments.lexer.Lexer`
+    attributes.
+
+    .. autoattribute:: name
+       :no-value:
+
+    .. autoattribute:: aliases
+       :no-value:
+
+    .. autoattribute:: filenames
+       :no-value:
+
+    You can pass options as keyword arguments to the constructor.
+    All formatters accept these basic options:
+
+    ``style``
+        The style to use, can be a string or a Style subclass
+        (default: "default"). Not used by e.g. the
+        TerminalFormatter.
+    ``full``
+        Tells the formatter to output a "full" document, i.e.
+        a complete self-contained document. This doesn't have
+        any effect for some formatters (default: false).
+    ``title``
+        If ``full`` is true, the title that should be used to
+        caption the document (default: '').
+    ``encoding``
+        If given, must be an encoding name. This will be used to
+        convert the Unicode token strings to byte strings in the
+        output. If it is "" or None, Unicode strings will be written
+        to the output file, which most file-like objects do not
+        support (default: None).
+    ``outencoding``
+        Overrides ``encoding`` if given.
+
+    """
+
+    #: Full name for the formatter, in human-readable form.
+    name = None
+
+    #: A list of short, unique identifiers that can be used to lookup
+    #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
+    aliases = []
+
+    #: A list of fnmatch patterns that match filenames for which this
+    #: formatter can produce output. The patterns in this list should be unique
+    #: among all formatters.
+    filenames = []
+
+    #: If True, this formatter outputs Unicode strings when no encoding
+    #: option is given.
+    unicodeoutput = True
+
+    def __init__(self, **options):
+        """
+        As with lexers, this constructor takes arbitrary optional arguments,
+        and if you override it, you should first process your own options, then
+        call the base class implementation.
+        """
+        self.style = _lookup_style(options.get('style', 'default'))
+        self.full = get_bool_opt(options, 'full', False)
+        self.title = options.get('title', '')
+        self.encoding = options.get('encoding', None) or None
+        if self.encoding in ('guess', 'chardet'):
+            # can happen for e.g. pygmentize -O encoding=guess
+            self.encoding = 'utf-8'
+        self.encoding = options.get('outencoding') or self.encoding
+        self.options = options
+
+    def get_style_defs(self, arg=''):
+        """
+        This method must return statements or declarations suitable to define
+        the current style for subsequent highlighted text (e.g. CSS classes
+        in the `HTMLFormatter`).
+
+        The optional argument `arg` can be used to modify the generation and
+        is formatter dependent (it is standardized because it can be given on
+        the command line).
+
+        This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
+        the `arg` is then given by the ``-a`` option.
+        """
+        return ''
+
+    def format(self, tokensource, outfile):
+        """
+        This method must format the tokens from the `tokensource` iterable and
+        write the formatted version to the file object `outfile`.
+
+        Formatter options can control how exactly the tokens are converted.
+        """
+        if self.encoding:
+            # wrap the outfile in a StreamWriter
+            outfile = codecs.lookup(self.encoding)[3](outfile)
+        return self.format_unencoded(tokensource, outfile)
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py
new file mode 100644
index 0000000..39db842
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py
@@ -0,0 +1,158 @@
+"""
+    pygments.formatters
+    ~~~~~~~~~~~~~~~~~~~
+
+    Pygments formatters.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import types
+import fnmatch
+from os.path import basename
+
+from pip._vendor.pygments.formatters._mapping import FORMATTERS
+from pip._vendor.pygments.plugin import find_plugin_formatters
+from pip._vendor.pygments.util import ClassNotFound
+
+__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
+           'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
+
+_formatter_cache = {}  # classes by name
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+    """Return whether the supplied file name fn matches pattern filename."""
+    if glob not in _pattern_cache:
+        pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+        return pattern.match(fn)
+    return _pattern_cache[glob].match(fn)
+
+
+def _load_formatters(module_name):
+    """Load a formatter (and all others in the module too)."""
+    mod = __import__(module_name, None, None, ['__all__'])
+    for formatter_name in mod.__all__:
+        cls = getattr(mod, formatter_name)
+        _formatter_cache[cls.name] = cls
+
+
+def get_all_formatters():
+    """Return a generator for all formatter classes."""
+    # NB: this returns formatter classes, not info like get_all_lexers().
+    for info in FORMATTERS.values():
+        if info[1] not in _formatter_cache:
+            _load_formatters(info[0])
+        yield _formatter_cache[info[1]]
+    for _, formatter in find_plugin_formatters():
+        yield formatter
+
+
+def find_formatter_class(alias):
+    """Lookup a formatter by alias.
+
+    Returns None if not found.
+    """
+    for module_name, name, aliases, _, _ in FORMATTERS.values():
+        if alias in aliases:
+            if name not in _formatter_cache:
+                _load_formatters(module_name)
+            return _formatter_cache[name]
+    for _, cls in find_plugin_formatters():
+        if alias in cls.aliases:
+            return cls
+
+
+def get_formatter_by_name(_alias, **options):
+    """
+    Return an instance of a :class:`.Formatter` subclass that has `alias` in its
+    aliases list. The formatter is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that
+    alias is found.
+    """
+    cls = find_formatter_class(_alias)
+    if cls is None:
+        raise ClassNotFound("no formatter found for name %r" % _alias)
+    return cls(**options)
+
+
+def load_formatter_from_file(filename, formattername="CustomFormatter", **options):
+    """
+    Return a `Formatter` subclass instance loaded from the provided file, relative
+    to the current directory.
+
+    The file is expected to contain a Formatter class named ``formattername``
+    (by default, CustomFormatter). Users should be very careful with the input, because
+    this method is equivalent to running ``eval()`` on the input file. The formatter is
+    given the `options` at its instantiation.
+
+    :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading
+    the formatter.
+
+    .. versionadded:: 2.2
+    """
+    try:
+        # This empty dict will contain the namespace for the exec'd file
+        custom_namespace = {}
+        with open(filename, 'rb') as f:
+            exec(f.read(), custom_namespace)
+        # Retrieve the class `formattername` from that namespace
+        if formattername not in custom_namespace:
+            raise ClassNotFound('no valid %s class found in %s' %
+                                (formattername, filename))
+        formatter_class = custom_namespace[formattername]
+        # And finally instantiate it with the options
+        return formatter_class(**options)
+    except OSError as err:
+        raise ClassNotFound('cannot read %s: %s' % (filename, err))
+    except ClassNotFound:
+        raise
+    except Exception as err:
+        raise ClassNotFound('error when loading custom formatter: %s' % err)
+
+
+def get_formatter_for_filename(fn, **options):
+    """
+    Return a :class:`.Formatter` subclass instance that has a filename pattern
+    matching `fn`. The formatter is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
+    is found.
+    """
+    fn = basename(fn)
+    for modname, name, _, filenames, _ in FORMATTERS.values():
+        for filename in filenames:
+            if _fn_matches(fn, filename):
+                if name not in _formatter_cache:
+                    _load_formatters(modname)
+                return _formatter_cache[name](**options)
+    for cls in find_plugin_formatters():
+        for filename in cls.filenames:
+            if _fn_matches(fn, filename):
+                return cls(**options)
+    raise ClassNotFound("no formatter found for file name %r" % fn)
+
+
+class _automodule(types.ModuleType):
+    """Automatically import formatters."""
+
+    def __getattr__(self, name):
+        info = FORMATTERS.get(name)
+        if info:
+            _load_formatters(info[0])
+            cls = _formatter_cache[info[1]]
+            setattr(self, name, cls)
+            return cls
+        raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..72cdf2f
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc
new file mode 100644
index 0000000..81490e3
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc
new file mode 100644
index 0000000..75750a8
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc
new file mode 100644
index 0000000..d9c78ce
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc
new file mode 100644
index 0000000..2f1aed8
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc
new file mode 100644
index 0000000..bcee2fa
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc
new file mode 100644
index 0000000..cc97581
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc
new file mode 100644
index 0000000..c330caa
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc
new file mode 100644
index 0000000..a067c4c
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc
new file mode 100644
index 0000000..02608cd
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc
new file mode 100644
index 0000000..91b2f71
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc
new file mode 100644
index 0000000..2739e2c
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc
new file mode 100644
index 0000000..33b8140
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc
new file mode 100644
index 0000000..3074cbb
Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc differ
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py
new file mode 100644
index 0000000..72ca840
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py
@@ -0,0 +1,23 @@
+# Automatically generated by scripts/gen_mapfiles.py.
+# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
+
+FORMATTERS = {
+    'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+    'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'),
+    'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option). The ``<div>``'s CSS class can be set by the `cssclass` option."),
+    'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
+    'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+    'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+    'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
+    'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+    'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
+    'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+    'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'),
+}
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py
new file mode 100644
index 0000000..c4db8f4
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/bbcode.py
@@ -0,0 +1,108 @@
+"""
+    pygments.formatters.bbcode
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    BBcode formatter.
+
+    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+""" + + +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.util import get_bool_opt + +__all__ = ['BBCodeFormatter'] + + +class BBCodeFormatter(Formatter): + """ + Format tokens with BBcodes. These formatting codes are used by many + bulletin boards, so you can highlight your sourcecode with pygments before + posting it there. + + This formatter has no support for background colors and borders, as there + are no common BBcode tags for that. + + Some board systems (e.g. phpBB) don't support colors in their [code] tag, + so you can't use the highlighting together with that tag. + Text in a [code] tag usually is shown with a monospace font (which this + formatter can do with the ``monofont`` option) and no spaces (which you + need for indentation) are removed. + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). + + `codetag` + If set to true, put the output into ``[code]`` tags (default: + ``false``) + + `monofont` + If set to true, add a tag to show the code with a monospace font + (default: ``false``). + """ + name = 'BBCode' + aliases = ['bbcode', 'bb'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + self._code = get_bool_opt(options, 'codetag', False) + self._mono = get_bool_opt(options, 'monofont', False) + + self.styles = {} + self._make_styles() + + def _make_styles(self): + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '[color=#%s]' % ndef['color'] + end = '[/color]' + end + if ndef['bold']: + start += '[b]' + end = '[/b]' + end + if ndef['italic']: + start += '[i]' + end = '[/i]' + end + if ndef['underline']: + start += '[u]' + end = '[/u]' + end + # there are no common BBcodes for background-color and border + + self.styles[ttype] = start, end + + def format_unencoded(self, tokensource, outfile): + if self._code: + outfile.write('[code]') + if self._mono: + outfile.write('[font=monospace]') + + lastval = '' + lasttype = None + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + if ttype == lasttype: + lastval += value + else: + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + lastval = value + lasttype = ttype + + if lastval: + start, end = self.styles[lasttype] + outfile.write(''.join((start, lastval, end))) + + if self._mono: + outfile.write('[/font]') + if self._code: + outfile.write('[/code]') + if self._code or self._mono: + outfile.write('\n') diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py new file mode 100644 index 0000000..30a528e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/groff.py @@ -0,0 +1,170 @@ +""" + pygments.formatters.groff + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for groff output. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import math +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.util import get_bool_opt, get_int_opt + +__all__ = ['GroffFormatter'] + + +class GroffFormatter(Formatter): + """ + Format tokens with groff escapes to change their color and font style. + + .. versionadded:: 2.11 + + Additional options accepted: + + `style` + The style to use, can be a string or a Style subclass (default: + ``'default'``). 
+ + `monospaced` + If set to true, monospace font will be used (default: ``true``). + + `linenos` + If set to true, print the line numbers (default: ``false``). + + `wrap` + Wrap lines to the specified number of characters. Disabled if set to 0 + (default: ``0``). + """ + + name = 'groff' + aliases = ['groff','troff','roff'] + filenames = [] + + def __init__(self, **options): + Formatter.__init__(self, **options) + + self.monospaced = get_bool_opt(options, 'monospaced', True) + self.linenos = get_bool_opt(options, 'linenos', False) + self._lineno = 0 + self.wrap = get_int_opt(options, 'wrap', 0) + self._linelen = 0 + + self.styles = {} + self._make_styles() + + + def _make_styles(self): + regular = '\\f[CR]' if self.monospaced else '\\f[R]' + bold = '\\f[CB]' if self.monospaced else '\\f[B]' + italic = '\\f[CI]' if self.monospaced else '\\f[I]' + + for ttype, ndef in self.style: + start = end = '' + if ndef['color']: + start += '\\m[%s]' % ndef['color'] + end = '\\m[]' + end + if ndef['bold']: + start += bold + end = regular + end + if ndef['italic']: + start += italic + end = regular + end + if ndef['bgcolor']: + start += '\\M[%s]' % ndef['bgcolor'] + end = '\\M[]' + end + + self.styles[ttype] = start, end + + + def _define_colors(self, outfile): + colors = set() + for _, ndef in self.style: + if ndef['color'] is not None: + colors.add(ndef['color']) + + for color in sorted(colors): + outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') + + + def _write_lineno(self, outfile): + self._lineno += 1 + outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno)) + + + def _wrap_line(self, line): + length = len(line.rstrip('\n')) + space = ' ' if self.linenos else '' + newline = '' + + if length > self.wrap: + for i in range(0, math.floor(length / self.wrap)): + chunk = line[i*self.wrap:i*self.wrap+self.wrap] + newline += (chunk + '\n' + space) + remainder = length % self.wrap + if remainder > 0: + newline += line[-remainder-1:] + self._linelen = remainder + elif self._linelen + length > self.wrap: + newline = ('\n' + space) + line + self._linelen = length + else: + newline = line + self._linelen += length + + return newline + + + def _escape_chars(self, text): + text = text.replace('\\', '\\[u005C]'). \ + replace('.', '\\[char46]'). \ + replace('\'', '\\[u0027]'). \ + replace('`', '\\[u0060]'). 
\ + replace('~', '\\[u007E]') + copy = text + + for char in copy: + if len(char) != len(char.encode()): + uni = char.encode('unicode_escape') \ + .decode()[1:] \ + .replace('x', 'u00') \ + .upper() + text = text.replace(char, '\\[u' + uni[1:] + ']') + + return text + + + def format_unencoded(self, tokensource, outfile): + self._define_colors(outfile) + + outfile.write('.nf\n\\f[CR]\n') + + if self.linenos: + self._write_lineno(outfile) + + for ttype, value in tokensource: + while ttype not in self.styles: + ttype = ttype.parent + start, end = self.styles[ttype] + + for line in value.splitlines(True): + if self.wrap > 0: + line = self._wrap_line(line) + + if start and end: + text = self._escape_chars(line.rstrip('\n')) + if text != '': + outfile.write(''.join((start, text, end))) + else: + outfile.write(self._escape_chars(line.rstrip('\n'))) + + if line.endswith('\n'): + if self.linenos: + self._write_lineno(outfile) + self._linelen = 0 + else: + outfile.write('\n') + self._linelen = 0 + + outfile.write('\n.fi') diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py new file mode 100644 index 0000000..931d7c3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/html.py @@ -0,0 +1,989 @@ +""" + pygments.formatters.html + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Formatter for HTML output. + + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import functools +import os +import sys +import os.path +from io import StringIO + +from pip._vendor.pygments.formatter import Formatter +from pip._vendor.pygments.token import Token, Text, STANDARD_TYPES +from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt + +try: + import ctags +except ImportError: + ctags = None + +__all__ = ['HtmlFormatter'] + + +_escape_html_table = { + ord('&'): '&', + ord('<'): '<', + ord('>'): '>', + ord('"'): '"', + ord("'"): ''', +} + + +def escape_html(text, table=_escape_html_table): + """Escape &, <, > as well as single and double quotes for HTML.""" + return text.translate(table) + + +def webify(color): + if color.startswith('calc') or color.startswith('var'): + return color + else: + return '#' + color + + +def _get_ttype_class(ttype): + fname = STANDARD_TYPES.get(ttype) + if fname: + return fname + aname = '' + while fname is None: + aname = '-' + ttype[-1] + aname + ttype = ttype.parent + fname = STANDARD_TYPES.get(ttype) + return fname + aname + + +CSSFILE_TEMPLATE = '''\ +/* +generated by Pygments +Copyright 2006-2023 by the Pygments team. +Licensed under the BSD license, see LICENSE for details. +*/ +%(styledefs)s +''' + +DOC_HEADER = '''\ + + + + + %(title)s + + + + +

<h2>%(title)s</h2>

+ +''' + +DOC_HEADER_EXTERNALCSS = '''\ + + + + + %(title)s + + + + +

<h2>%(title)s</h2>

+ +''' + +DOC_FOOTER = '''\ + + +''' + + +class HtmlFormatter(Formatter): + r""" + Format tokens as HTML 4 ```` tags. By default, the content is enclosed + in a ``
<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option).
+    The ``<div>
``'s CSS class can be set by the `cssclass` option. + + If the `linenos` option is set to ``"table"``, the ``
<pre>`` is
+    additionally wrapped inside a ``<table>`` which has one row and two
+    cells: one containing the line numbers and one containing the code.
+    Example:
+
+    .. sourcecode:: html
+
+        <div class="highlight" >
+        <table><tr>
+          <td class="linenos" title="click to toggle"
+            onclick="with (this.firstChild.style)
+                     { display = (display == '') ? 'none' : '' }">
+            <pre>1
+            2</pre>
+          </td>
+          <td class="code">
+            <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
+              <span class="Ke">pass</span>
+            </pre>
+          </td>
+        </tr></table></div>
+ + (whitespace added to improve clarity). + + A list of lines can be specified using the `hl_lines` option to make these + lines highlighted (as of Pygments 0.11). + + With the `full` option, a complete HTML 4 document is output, including + the style definitions inside a `` + {% else %} + {{ head | safe }} + {% endif %} +{% if not embed %} + + +{% endif %} +{{ body | safe }} +{% for diagram in diagrams %} +
+    <div class="railroad-group">
+        <h1 class="railroad-heading">{{ diagram.title }}</h1>
+        <div class="railroad-description">{{ diagram.text }}</div>
+        <div class="railroad-svg">
+            {{ diagram.svg }}
+        </div>
+    </div>
+{% endfor %} +{% if not embed %} + + +{% endif %} +""" + +template = Template(jinja2_template_source) + +# Note: ideally this would be a dataclass, but we're supporting Python 3.5+ so we can't do this yet +NamedDiagram = NamedTuple( + "NamedDiagram", + [("name", str), ("diagram", typing.Optional[railroad.DiagramItem]), ("index", int)], +) +""" +A simple structure for associating a name with a railroad diagram +""" + +T = TypeVar("T") + + +class EachItem(railroad.Group): + """ + Custom railroad item to compose a: + - Group containing a + - OneOrMore containing a + - Choice of the elements in the Each + with the group label indicating that all must be matched + """ + + all_label = "[ALL]" + + def __init__(self, *items): + choice_item = railroad.Choice(len(items) - 1, *items) + one_or_more_item = railroad.OneOrMore(item=choice_item) + super().__init__(one_or_more_item, label=self.all_label) + + +class AnnotatedItem(railroad.Group): + """ + Simple subclass of Group that creates an annotation label + """ + + def __init__(self, label: str, item): + super().__init__(item=item, label="[{}]".format(label) if label else label) + + +class EditablePartial(Generic[T]): + """ + Acts like a functools.partial, but can be edited. In other words, it represents a type that hasn't yet been + constructed. + """ + + # We need this here because the railroad constructors actually transform the data, so can't be called until the + # entire tree is assembled + + def __init__(self, func: Callable[..., T], args: list, kwargs: dict): + self.func = func + self.args = args + self.kwargs = kwargs + + @classmethod + def from_call(cls, func: Callable[..., T], *args, **kwargs) -> "EditablePartial[T]": + """ + If you call this function in the same way that you would call the constructor, it will store the arguments + as you expect. For example EditablePartial.from_call(Fraction, 1, 3)() == Fraction(1, 3) + """ + return EditablePartial(func=func, args=list(args), kwargs=kwargs) + + @property + def name(self): + return self.kwargs["name"] + + def __call__(self) -> T: + """ + Evaluate the partial and return the result + """ + args = self.args.copy() + kwargs = self.kwargs.copy() + + # This is a helpful hack to allow you to specify varargs parameters (e.g. *args) as keyword args (e.g. 
+ # args=['list', 'of', 'things']) + arg_spec = inspect.getfullargspec(self.func) + if arg_spec.varargs in self.kwargs: + args += kwargs.pop(arg_spec.varargs) + + return self.func(*args, **kwargs) + + +def railroad_to_html(diagrams: List[NamedDiagram], embed=False, **kwargs) -> str: + """ + Given a list of NamedDiagram, produce a single HTML string that visualises those diagrams + :params kwargs: kwargs to be passed in to the template + """ + data = [] + for diagram in diagrams: + if diagram.diagram is None: + continue + io = StringIO() + try: + css = kwargs.get('css') + diagram.diagram.writeStandalone(io.write, css=css) + except AttributeError: + diagram.diagram.writeSvg(io.write) + title = diagram.name + if diagram.index == 0: + title += " (root)" + data.append({"title": title, "text": "", "svg": io.getvalue()}) + + return template.render(diagrams=data, embed=embed, **kwargs) + + +def resolve_partial(partial: "EditablePartial[T]") -> T: + """ + Recursively resolves a collection of Partials into whatever type they are + """ + if isinstance(partial, EditablePartial): + partial.args = resolve_partial(partial.args) + partial.kwargs = resolve_partial(partial.kwargs) + return partial() + elif isinstance(partial, list): + return [resolve_partial(x) for x in partial] + elif isinstance(partial, dict): + return {key: resolve_partial(x) for key, x in partial.items()} + else: + return partial + + +def to_railroad( + element: pyparsing.ParserElement, + diagram_kwargs: typing.Optional[dict] = None, + vertical: int = 3, + show_results_names: bool = False, + show_groups: bool = False, +) -> List[NamedDiagram]: + """ + Convert a pyparsing element tree into a list of diagrams. This is the recommended entrypoint to diagram + creation if you want to access the Railroad tree before it is converted to HTML + :param element: base element of the parser being diagrammed + :param diagram_kwargs: kwargs to pass to the Diagram() constructor + :param vertical: (optional) - int - limit at which number of alternatives should be + shown vertically instead of horizontally + :param show_results_names - bool to indicate whether results name annotations should be + included in the diagram + :param show_groups - bool to indicate whether groups should be highlighted with an unlabeled + surrounding box + """ + # Convert the whole tree underneath the root + lookup = ConverterState(diagram_kwargs=diagram_kwargs or {}) + _to_diagram_element( + element, + lookup=lookup, + parent=None, + vertical=vertical, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + root_id = id(element) + # Convert the root if it hasn't been already + if root_id in lookup: + if not element.customName: + lookup[root_id].name = "" + lookup[root_id].mark_for_extraction(root_id, lookup, force=True) + + # Now that we're finished, we can convert from intermediate structures into Railroad elements + diags = list(lookup.diagrams.values()) + if len(diags) > 1: + # collapse out duplicate diags with the same name + seen = set() + deduped_diags = [] + for d in diags: + # don't extract SkipTo elements, they are uninformative as subdiagrams + if d.name == "...": + continue + if d.name is not None and d.name not in seen: + seen.add(d.name) + deduped_diags.append(d) + resolved = [resolve_partial(partial) for partial in deduped_diags] + else: + # special case - if just one diagram, always display it, even if + # it has no name + resolved = [resolve_partial(partial) for partial in diags] + return sorted(resolved, key=lambda diag: diag.index) + + 
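# Illustrative usage sketch (not part of the vendored file): the two public
# entry points defined above, to_railroad() and railroad_to_html(), can be
# chained to render a pyparsing grammar as an HTML page of railroad diagrams.
# The grammar below is a made-up example; only the two calls and their
# parameters are taken from this module.
#
#     import pyparsing as pp
#
#     ident = pp.Word(pp.alphas).set_name("identifier")
#     number = pp.Word(pp.nums).set_name("number")
#     assignment = (ident + "=" + (number | ident)).set_name("assignment")
#
#     diagrams = to_railroad(assignment, vertical=3, show_results_names=True)
#     with open("grammar_diagrams.html", "w") as f:
#         f.write(railroad_to_html(diagrams))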
+def _should_vertical( + specification: int, exprs: Iterable[pyparsing.ParserElement] +) -> bool: + """ + Returns true if we should return a vertical list of elements + """ + if specification is None: + return False + else: + return len(_visible_exprs(exprs)) >= specification + + +class ElementState: + """ + State recorded for an individual pyparsing Element + """ + + # Note: this should be a dataclass, but we have to support Python 3.5 + def __init__( + self, + element: pyparsing.ParserElement, + converted: EditablePartial, + parent: EditablePartial, + number: int, + name: str = None, + parent_index: typing.Optional[int] = None, + ): + #: The pyparsing element that this represents + self.element: pyparsing.ParserElement = element + #: The name of the element + self.name: typing.Optional[str] = name + #: The output Railroad element in an unconverted state + self.converted: EditablePartial = converted + #: The parent Railroad element, which we store so that we can extract this if it's duplicated + self.parent: EditablePartial = parent + #: The order in which we found this element, used for sorting diagrams if this is extracted into a diagram + self.number: int = number + #: The index of this inside its parent + self.parent_index: typing.Optional[int] = parent_index + #: If true, we should extract this out into a subdiagram + self.extract: bool = False + #: If true, all of this element's children have been filled out + self.complete: bool = False + + def mark_for_extraction( + self, el_id: int, state: "ConverterState", name: str = None, force: bool = False + ): + """ + Called when this instance has been seen twice, and thus should eventually be extracted into a sub-diagram + :param el_id: id of the element + :param state: element/diagram state tracker + :param name: name to use for this element's text + :param force: If true, force extraction now, regardless of the state of this. Only useful for extracting the + root element when we know we're finished + """ + self.extract = True + + # Set the name + if not self.name: + if name: + # Allow forcing a custom name + self.name = name + elif self.element.customName: + self.name = self.element.customName + else: + self.name = "" + + # Just because this is marked for extraction doesn't mean we can do it yet. We may have to wait for children + # to be added + # Also, if this is just a string literal etc, don't bother extracting it + if force or (self.complete and _worth_extracting(self.element)): + state.extract_into_diagram(el_id) + + +class ConverterState: + """ + Stores some state that persists between recursions into the element tree + """ + + def __init__(self, diagram_kwargs: typing.Optional[dict] = None): + #: A dictionary mapping ParserElements to state relating to them + self._element_diagram_states: Dict[int, ElementState] = {} + #: A dictionary mapping ParserElement IDs to subdiagrams generated from them + self.diagrams: Dict[int, EditablePartial[NamedDiagram]] = {} + #: The index of the next unnamed element + self.unnamed_index: int = 1 + #: The index of the next element. 
This is used for sorting + self.index: int = 0 + #: Shared kwargs that are used to customize the construction of diagrams + self.diagram_kwargs: dict = diagram_kwargs or {} + self.extracted_diagram_names: Set[str] = set() + + def __setitem__(self, key: int, value: ElementState): + self._element_diagram_states[key] = value + + def __getitem__(self, key: int) -> ElementState: + return self._element_diagram_states[key] + + def __delitem__(self, key: int): + del self._element_diagram_states[key] + + def __contains__(self, key: int): + return key in self._element_diagram_states + + def generate_unnamed(self) -> int: + """ + Generate a number used in the name of an otherwise unnamed diagram + """ + self.unnamed_index += 1 + return self.unnamed_index + + def generate_index(self) -> int: + """ + Generate a number used to index a diagram + """ + self.index += 1 + return self.index + + def extract_into_diagram(self, el_id: int): + """ + Used when we encounter the same token twice in the same tree. When this + happens, we replace all instances of that token with a terminal, and + create a new subdiagram for the token + """ + position = self[el_id] + + # Replace the original definition of this element with a regular block + if position.parent: + ret = EditablePartial.from_call(railroad.NonTerminal, text=position.name) + if "item" in position.parent.kwargs: + position.parent.kwargs["item"] = ret + elif "items" in position.parent.kwargs: + position.parent.kwargs["items"][position.parent_index] = ret + + # If the element we're extracting is a group, skip to its content but keep the title + if position.converted.func == railroad.Group: + content = position.converted.kwargs["item"] + else: + content = position.converted + + self.diagrams[el_id] = EditablePartial.from_call( + NamedDiagram, + name=position.name, + diagram=EditablePartial.from_call( + railroad.Diagram, content, **self.diagram_kwargs + ), + index=position.number, + ) + + del self[el_id] + + +def _worth_extracting(element: pyparsing.ParserElement) -> bool: + """ + Returns true if this element is worth having its own sub-diagram. 
Simply, if any of its children + themselves have children, then its complex enough to extract + """ + children = element.recurse() + return any(child.recurse() for child in children) + + +def _apply_diagram_item_enhancements(fn): + """ + decorator to ensure enhancements to a diagram item (such as results name annotations) + get applied on return from _to_diagram_element (we do this since there are several + returns in _to_diagram_element) + """ + + def _inner( + element: pyparsing.ParserElement, + parent: typing.Optional[EditablePartial], + lookup: ConverterState = None, + vertical: int = None, + index: int = 0, + name_hint: str = None, + show_results_names: bool = False, + show_groups: bool = False, + ) -> typing.Optional[EditablePartial]: + ret = fn( + element, + parent, + lookup, + vertical, + index, + name_hint, + show_results_names, + show_groups, + ) + + # apply annotation for results name, if present + if show_results_names and ret is not None: + element_results_name = element.resultsName + if element_results_name: + # add "*" to indicate if this is a "list all results" name + element_results_name += "" if element.modalResults else "*" + ret = EditablePartial.from_call( + railroad.Group, item=ret, label=element_results_name + ) + + return ret + + return _inner + + +def _visible_exprs(exprs: Iterable[pyparsing.ParserElement]): + non_diagramming_exprs = ( + pyparsing.ParseElementEnhance, + pyparsing.PositionToken, + pyparsing.And._ErrorStop, + ) + return [ + e + for e in exprs + if not (e.customName or e.resultsName or isinstance(e, non_diagramming_exprs)) + ] + + +@_apply_diagram_item_enhancements +def _to_diagram_element( + element: pyparsing.ParserElement, + parent: typing.Optional[EditablePartial], + lookup: ConverterState = None, + vertical: int = None, + index: int = 0, + name_hint: str = None, + show_results_names: bool = False, + show_groups: bool = False, +) -> typing.Optional[EditablePartial]: + """ + Recursively converts a PyParsing Element to a railroad Element + :param lookup: The shared converter state that keeps track of useful things + :param index: The index of this element within the parent + :param parent: The parent of this element in the output tree + :param vertical: Controls at what point we make a list of elements vertical. If this is an integer (the default), + it sets the threshold of the number of items before we go vertical. 
If True, always go vertical, if False, never + do so + :param name_hint: If provided, this will override the generated name + :param show_results_names: bool flag indicating whether to add annotations for results names + :returns: The converted version of the input element, but as a Partial that hasn't yet been constructed + :param show_groups: bool flag indicating whether to show groups using bounding box + """ + exprs = element.recurse() + name = name_hint or element.customName or element.__class__.__name__ + + # Python's id() is used to provide a unique identifier for elements + el_id = id(element) + + element_results_name = element.resultsName + + # Here we basically bypass processing certain wrapper elements if they contribute nothing to the diagram + if not element.customName: + if isinstance( + element, + ( + # pyparsing.TokenConverter, + # pyparsing.Forward, + pyparsing.Located, + ), + ): + # However, if this element has a useful custom name, and its child does not, we can pass it on to the child + if exprs: + if not exprs[0].customName: + propagated_name = name + else: + propagated_name = None + + return _to_diagram_element( + element.expr, + parent=parent, + lookup=lookup, + vertical=vertical, + index=index, + name_hint=propagated_name, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + # If the element isn't worth extracting, we always treat it as the first time we say it + if _worth_extracting(element): + if el_id in lookup: + # If we've seen this element exactly once before, we are only just now finding out that it's a duplicate, + # so we have to extract it into a new diagram. + looked_up = lookup[el_id] + looked_up.mark_for_extraction(el_id, lookup, name=name_hint) + ret = EditablePartial.from_call(railroad.NonTerminal, text=looked_up.name) + return ret + + elif el_id in lookup.diagrams: + # If we have seen the element at least twice before, and have already extracted it into a subdiagram, we + # just put in a marker element that refers to the sub-diagram + ret = EditablePartial.from_call( + railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] + ) + return ret + + # Recursively convert child elements + # Here we find the most relevant Railroad element for matching pyparsing Element + # We use ``items=[]`` here to hold the place for where the child elements will go once created + if isinstance(element, pyparsing.And): + # detect And's created with ``expr*N`` notation - for these use a OneOrMore with a repeat + # (all will have the same name, and resultsName) + if not exprs: + return None + if len(set((e.name, e.resultsName) for e in exprs)) == 1: + ret = EditablePartial.from_call( + railroad.OneOrMore, item="", repeat=str(len(exprs)) + ) + elif _should_vertical(vertical, exprs): + ret = EditablePartial.from_call(railroad.Stack, items=[]) + else: + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + elif isinstance(element, (pyparsing.Or, pyparsing.MatchFirst)): + if not exprs: + return None + if _should_vertical(vertical, exprs): + ret = EditablePartial.from_call(railroad.Choice, 0, items=[]) + else: + ret = EditablePartial.from_call(railroad.HorizontalChoice, items=[]) + elif isinstance(element, pyparsing.Each): + if not exprs: + return None + ret = EditablePartial.from_call(EachItem, items=[]) + elif isinstance(element, pyparsing.NotAny): + ret = EditablePartial.from_call(AnnotatedItem, label="NOT", item="") + elif isinstance(element, pyparsing.FollowedBy): + ret = EditablePartial.from_call(AnnotatedItem, label="LOOKAHEAD", 
item="") + elif isinstance(element, pyparsing.PrecededBy): + ret = EditablePartial.from_call(AnnotatedItem, label="LOOKBEHIND", item="") + elif isinstance(element, pyparsing.Group): + if show_groups: + ret = EditablePartial.from_call(AnnotatedItem, label="", item="") + else: + ret = EditablePartial.from_call(railroad.Group, label="", item="") + elif isinstance(element, pyparsing.TokenConverter): + label = type(element).__name__.lower() + if label == "tokenconverter": + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + else: + ret = EditablePartial.from_call(AnnotatedItem, label=label, item="") + elif isinstance(element, pyparsing.Opt): + ret = EditablePartial.from_call(railroad.Optional, item="") + elif isinstance(element, pyparsing.OneOrMore): + ret = EditablePartial.from_call(railroad.OneOrMore, item="") + elif isinstance(element, pyparsing.ZeroOrMore): + ret = EditablePartial.from_call(railroad.ZeroOrMore, item="") + elif isinstance(element, pyparsing.Group): + ret = EditablePartial.from_call( + railroad.Group, item=None, label=element_results_name + ) + elif isinstance(element, pyparsing.Empty) and not element.customName: + # Skip unnamed "Empty" elements + ret = None + elif isinstance(element, pyparsing.ParseElementEnhance): + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + elif len(exprs) > 0 and not element_results_name: + ret = EditablePartial.from_call(railroad.Group, item="", label=name) + elif len(exprs) > 0: + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + else: + terminal = EditablePartial.from_call(railroad.Terminal, element.defaultName) + ret = terminal + + if ret is None: + return + + # Indicate this element's position in the tree so we can extract it if necessary + lookup[el_id] = ElementState( + element=element, + converted=ret, + parent=parent, + parent_index=index, + number=lookup.generate_index(), + ) + if element.customName: + lookup[el_id].mark_for_extraction(el_id, lookup, element.customName) + + i = 0 + for expr in exprs: + # Add a placeholder index in case we have to extract the child before we even add it to the parent + if "items" in ret.kwargs: + ret.kwargs["items"].insert(i, None) + + item = _to_diagram_element( + expr, + parent=ret, + lookup=lookup, + vertical=vertical, + index=i, + show_results_names=show_results_names, + show_groups=show_groups, + ) + + # Some elements don't need to be shown in the diagram + if item is not None: + if "item" in ret.kwargs: + ret.kwargs["item"] = item + elif "items" in ret.kwargs: + # If we've already extracted the child, don't touch this index, since it's occupied by a nonterminal + ret.kwargs["items"][i] = item + i += 1 + elif "items" in ret.kwargs: + # If we're supposed to skip this element, remove it from the parent + del ret.kwargs["items"][i] + + # If all this items children are none, skip this item + if ret and ( + ("items" in ret.kwargs and len(ret.kwargs["items"]) == 0) + or ("item" in ret.kwargs and ret.kwargs["item"] is None) + ): + ret = EditablePartial.from_call(railroad.Terminal, name) + + # Mark this element as "complete", ie it has all of its children + if el_id in lookup: + lookup[el_id].complete = True + + if el_id in lookup and lookup[el_id].extract and lookup[el_id].complete: + lookup.extract_into_diagram(el_id) + if ret is not None: + ret = EditablePartial.from_call( + railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] + ) + + return ret diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b9e6989 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py new file mode 100644 index 0000000..12219f1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/exceptions.py @@ -0,0 +1,299 @@ +# exceptions.py + +import re +import sys +import typing + +from .util import ( + col, + line, + lineno, + _collapse_string_to_ranges, + replaced_by_pep8, +) +from .unicode import pyparsing_unicode as ppu + + +class ExceptionWordUnicode(ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic): + pass + + +_extract_alphanums = _collapse_string_to_ranges(ExceptionWordUnicode.alphanums) +_exception_word_extractor = re.compile("([" + _extract_alphanums + "]{1,16})|.") + + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + + loc: int + msg: str + pstr: str + parser_element: typing.Any # "ParserElement" + args: typing.Tuple[str, int, typing.Optional[str]] + + __slots__ = ( + "loc", + "msg", + "pstr", + "parser_element", + "args", + ) + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( + self, + pstr: str, + loc: int = 0, + msg: typing.Optional[str] = None, + elem=None, + ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parser_element = elem + self.args = (pstr, loc, msg) + + @staticmethod + def explain_exception(exc, depth=16): + """ + Method to take an exception and translate the Python internal traceback into a list + of the pyparsing expressions that caused the exception to be raised. + + Parameters: + + - exc - exception raised during parsing (need not be a ParseException, in support + of Python exceptions that might be raised in a parse action) + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. 
+ """ + import inspect + from .core import ParserElement + + if depth is None: + depth = sys.getrecursionlimit() + ret = [] + if isinstance(exc, ParseBaseException): + ret.append(exc.line) + ret.append(" " * (exc.column - 1) + "^") + ret.append(f"{type(exc).__name__}: {exc}") + + if depth > 0: + callers = inspect.getinnerframes(exc.__traceback__, context=depth) + seen = set() + for i, ff in enumerate(callers[-depth:]): + frm = ff[0] + + f_self = frm.f_locals.get("self", None) + if isinstance(f_self, ParserElement): + if not frm.f_code.co_name.startswith( + ("parseImpl", "_parseNoCache") + ): + continue + if id(f_self) in seen: + continue + seen.add(id(f_self)) + + self_type = type(f_self) + ret.append( + f"{self_type.__module__}.{self_type.__name__} - {f_self}" + ) + + elif f_self is not None: + self_type = type(f_self) + ret.append(f"{self_type.__module__}.{self_type.__name__}") + + else: + code = frm.f_code + if code.co_name in ("wrapper", ""): + continue + + ret.append(code.co_name) + + depth -= 1 + if not depth: + break + + return "\n".join(ret) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parser_element) + + @property + def line(self) -> str: + """ + Return the line of text where the exception occurred. + """ + return line(self.loc, self.pstr) + + @property + def lineno(self) -> int: + """ + Return the 1-based line number of text where the exception occurred. + """ + return lineno(self.loc, self.pstr) + + @property + def col(self) -> int: + """ + Return the 1-based column on the line of text where the exception occurred. + """ + return col(self.loc, self.pstr) + + @property + def column(self) -> int: + """ + Return the 1-based column on the line of text where the exception occurred. + """ + return col(self.loc, self.pstr) + + # pre-PEP8 compatibility + @property + def parserElement(self): + return self.parser_element + + @parserElement.setter + def parserElement(self, elem): + self.parser_element = elem + + def __str__(self) -> str: + if self.pstr: + if self.loc >= len(self.pstr): + foundstr = ", found end of text" + else: + # pull out next word at error location + found_match = _exception_word_extractor.match(self.pstr, self.loc) + if found_match is not None: + found = found_match.group(0) + else: + found = self.pstr[self.loc : self.loc + 1] + foundstr = (", found %r" % found).replace(r"\\", "\\") + else: + foundstr = "" + return f"{self.msg}{foundstr} (at char {self.loc}), (line:{self.lineno}, col:{self.column})" + + def __repr__(self): + return str(self) + + def mark_input_line( + self, marker_string: typing.Optional[str] = None, *, markerString: str = ">!<" + ) -> str: + """ + Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. + """ + markerString = marker_string if marker_string is not None else markerString + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join( + (line_str[:line_column], markerString, line_str[line_column:]) + ) + return line_str.strip() + + def explain(self, depth=16) -> str: + """ + Method to translate the Python internal traceback into a list + of the pyparsing expressions that caused the exception to be raised. 
+ + Parameters: + + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. + + Example:: + + expr = pp.Word(pp.nums) * 3 + try: + expr.parse_string("123 456 A789") + except pp.ParseException as pe: + print(pe.explain(depth=0)) + + prints:: + + 123 456 A789 + ^ + ParseException: Expected W:(0-9), found 'A' (at char 8), (line:1, col:9) + + Note: the diagnostic output will include string representations of the expressions + that failed to parse. These representations will be more helpful if you use `set_name` to + give identifiable names to your expressions. Otherwise they will use the default string + forms, which may be cryptic to read. + + Note: pyparsing's default truncation of exception tracebacks may also truncate the + stack of expressions that are displayed in the ``explain`` output. To get the full listing + of parser expressions, you may have to set ``ParserElement.verbose_stacktrace = True`` + """ + return self.explain_exception(self, depth) + + # fmt: off + @replaced_by_pep8(mark_input_line) + def markInputline(self): ... + # fmt: on + + +class ParseException(ParseBaseException): + """ + Exception thrown when a parse expression doesn't match the input string + + Example:: + + try: + Word(nums).set_name("integer").parse_string("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.column)) + + prints:: + + Expected integer (at char 0), (line:1, col:1) + column: 1 + + """ + + +class ParseFatalException(ParseBaseException): + """ + User-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately + """ + + +class ParseSyntaxException(ParseFatalException): + """ + Just like :class:`ParseFatalException`, but thrown internally + when an :class:`ErrorStop` ('-' operator) indicates + that parsing is to stop immediately because an unbacktrackable + syntax error has been found. + """ + + +class RecursiveGrammarException(Exception): + """ + Exception thrown by :class:`ParserElement.validate` if the + grammar could be left-recursive; parser may need to enable + left recursion using :class:`ParserElement.enable_left_recursion` + """ + + def __init__(self, parseElementList): + self.parseElementTrace = parseElementList + + def __str__(self) -> str: + return f"RecursiveGrammarException: {self.parseElementTrace}" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py new file mode 100644 index 0000000..018f0d6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/helpers.py @@ -0,0 +1,1100 @@ +# helpers.py +import html.entities +import re +import sys +import typing + +from . import __diag__ +from .core import * +from .util import ( + _bslash, + _flatten, + _escape_regex_range_chars, + replaced_by_pep8, +) + + +# +# global helpers +# +def counted_array( + expr: ParserElement, + int_expr: typing.Optional[ParserElement] = None, + *, + intExpr: typing.Optional[ParserElement] = None, +) -> ParserElement: + """Helper to define a counted list of expressions. + + This helper defines a pattern of the form:: + + integer expr expr expr... + + where the leading integer tells how many expr expressions follow. 
+ The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``int_expr`` is specified, it should be a pyparsing expression + that produces an integer value. + + Example:: + + counted_array(Word(alphas)).parse_string('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binary_constant = Word('01').set_parse_action(lambda t: int(t[0], 2)) + counted_array(Word(alphas), int_expr=binary_constant).parse_string('10 ab cd ef') # -> ['ab', 'cd'] + + # if other fields must be parsed after the count but before the + # list items, give the fields results names and they will + # be preserved in the returned ParseResults: + count_with_metadata = integer + Word(alphas)("type") + typed_array = counted_array(Word(alphanums), int_expr=count_with_metadata)("items") + result = typed_array.parse_string("3 bool True True False") + print(result.dump()) + + # prints + # ['True', 'True', 'False'] + # - items: ['True', 'True', 'False'] + # - type: 'bool' + """ + intExpr = intExpr or int_expr + array_expr = Forward() + + def count_field_parse_action(s, l, t): + nonlocal array_expr + n = t[0] + array_expr <<= (expr * n) if n else Empty() + # clear list contents, but keep any named results + del t[:] + + if intExpr is None: + intExpr = Word(nums).set_parse_action(lambda t: int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.set_name("arrayLen") + intExpr.add_parse_action(count_field_parse_action, call_during_try=True) + return (intExpr + array_expr).set_name("(len) " + str(expr) + "...") + + +def match_previous_literal(expr: ParserElement) -> ParserElement: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = match_previous_literal(first) + match_expr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`match_previous_expr`. Do *not* use with packrat parsing + enabled. + """ + rep = Forward() + + def copy_token_to_repeater(s, l, t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.as_list()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + + expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) + rep.set_name("(prev) " + str(expr)) + return rep + + +def match_previous_expr(expr: ParserElement) -> ParserElement: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = match_previous_expr(first) + match_expr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. 
+ """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + + def copy_token_to_repeater(s, l, t): + matchTokens = _flatten(t.as_list()) + + def must_match_these_tokens(s, l, t): + theseTokens = _flatten(t.as_list()) + if theseTokens != matchTokens: + raise ParseException( + s, l, f"Expected {matchTokens}, found{theseTokens}" + ) + + rep.set_parse_action(must_match_these_tokens, callDuringTry=True) + + expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) + rep.set_name("(prev) " + str(expr)) + return rep + + +def one_of( + strs: Union[typing.Iterable[str], str], + caseless: bool = False, + use_regex: bool = True, + as_keyword: bool = False, + *, + useRegex: bool = True, + asKeyword: bool = False, +) -> ParserElement: + """Helper to quickly define a set of alternative :class:`Literal` s, + and makes sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. + + Parameters: + + - ``strs`` - a string of space-delimited literals, or a collection of + string literals + - ``caseless`` - treat all literals as caseless - (default= ``False``) + - ``use_regex`` - as an optimization, will + generate a :class:`Regex` object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True`` or ``as_keyword=True``, or if + creating a :class:`Regex` raises an exception) - (default= ``True``) + - ``as_keyword`` - enforce :class:`Keyword`-style matching on the + generated expressions - (default= ``False``) + - ``asKeyword`` and ``useRegex`` are retained for pre-PEP8 compatibility, + but will be removed in a future release + + Example:: + + comp_oper = one_of("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.search_string("B = 12 AA=23 B<=AA AA>12")) + + prints:: + + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + asKeyword = asKeyword or as_keyword + useRegex = useRegex and use_regex + + if ( + isinstance(caseless, str_type) + and __diag__.warn_on_multiple_string_args_to_oneof + ): + warnings.warn( + "More than one string argument passed to one_of, pass" + " choices as a list or space-delimited string", + stacklevel=2, + ) + + if caseless: + isequal = lambda a, b: a.upper() == b.upper() + masks = lambda a, b: b.upper().startswith(a.upper()) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral + else: + isequal = lambda a, b: a == b + masks = lambda a, b: b.startswith(a) + parseElementClass = Keyword if asKeyword else Literal + + symbols: List[str] = [] + if isinstance(strs, str_type): + strs = typing.cast(str, strs) + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + raise TypeError("Invalid argument to one_of, expected string or iterable") + if not symbols: + return NoMatch() + + # reorder given symbols to take care to avoid masking longer choices with shorter ones + # (but only if the given symbols are not just single characters) + if any(len(sym) > 1 for sym in symbols): + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1 :]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 + + if useRegex: + re_flags: int = re.IGNORECASE if caseless else 0 + + try: + if all(len(sym) == 1 for sym in symbols): + # symbols are just single characters, create 
range regex pattern + patt = f"[{''.join(_escape_regex_range_chars(sym) for sym in symbols)}]" + else: + patt = "|".join(re.escape(sym) for sym in symbols) + + # wrap with \b word break markers if defining as keywords + if asKeyword: + patt = rf"\b(?:{patt})\b" + + ret = Regex(patt, flags=re_flags).set_name(" | ".join(symbols)) + + if caseless: + # add parse action to return symbols as specified, not in random + # casing as found in input string + symbol_map = {sym.lower(): sym for sym in symbols} + ret.add_parse_action(lambda s, l, t: symbol_map[t[0].lower()]) + + return ret + + except re.error: + warnings.warn( + "Exception creating Regex for one_of, building MatchFirst", stacklevel=2 + ) + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).set_name( + " | ".join(symbols) + ) + + +def dict_of(key: ParserElement, value: ParserElement) -> ParserElement: + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. + + Example:: + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)) + print(attr_expr[1, ...].parse_string(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join) + + # similar to Dict, but simpler call format + result = dict_of(attr_label, attr_value).parse_string(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.as_dict()) + + prints:: + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: 'light blue' + - posn: 'upper left' + - shape: 'SQUARE' + - texture: 'burlap' + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict(OneOrMore(Group(key + value))) + + +def original_text_for( + expr: ParserElement, as_string: bool = True, *, asString: bool = True +) -> ParserElement: + """Helper to return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns a string containing the original parsed text. + + If the optional ``as_string`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`original_text_for` contains expressions with defined + results names, you must set ``as_string`` to ``False`` if you + want to preserve those results name values. + + The ``asString`` pre-PEP8 argument is retained for compatibility, + but will be removed in a future release. 
+ + Example:: + + src = "this is test bold text normal text " + for tag in ("b", "i"): + opener, closer = make_html_tags(tag) + patt = original_text_for(opener + ... + closer) + print(patt.search_string(src)[0]) + + prints:: + + [' bold text '] + ['text'] + """ + asString = asString and as_string + + locMarker = Empty().set_parse_action(lambda s, loc, t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s, l, t: s[t._original_start : t._original_end] + else: + + def extractText(s, l, t): + t[:] = [s[t.pop("_original_start") : t.pop("_original_end")]] + + matchExpr.set_parse_action(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + matchExpr.suppress_warning(Diagnostics.warn_ungrouped_named_tokens_in_collection) + return matchExpr + + +def ungroup(expr: ParserElement) -> ParserElement: + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. + """ + return TokenConverter(expr).add_parse_action(lambda t: t[0]) + + +def locatedExpr(expr: ParserElement) -> ParserElement: + """ + (DEPRECATED - future code should use the :class:`Located` class) + Helper to decorate a returned token with its starting and ending + locations in the input string. + + This helper adds the following results names: + + - ``locn_start`` - location where matched expression begins + - ``locn_end`` - location where matched expression ends + - ``value`` - the actual parsed results + + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parse_with_tabs` + + Example:: + + wd = Word(alphas) + for match in locatedExpr(wd).search_string("ljsdf123lksdjjf123lkkjj1222"): + print(match) + + prints:: + + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().set_parse_action(lambda ss, ll, tt: ll) + return Group( + locator("locn_start") + + expr("value") + + locator.copy().leaveWhitespace()("locn_end") + ) + + +def nested_expr( + opener: Union[str, ParserElement] = "(", + closer: Union[str, ParserElement] = ")", + content: typing.Optional[ParserElement] = None, + ignore_expr: ParserElement = quoted_string(), + *, + ignoreExpr: ParserElement = quoted_string(), +) -> ParserElement: + """Helper method for defining nested lists enclosed in opening and + closing delimiters (``"("`` and ``")"`` are the default). + + Parameters: + + - ``opener`` - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - ``closer`` - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - ``content`` - expression for items within the nested lists + (default= ``None``) + - ``ignore_expr`` - expression for ignoring opening and closing delimiters + (default= :class:`quoted_string`) + - ``ignoreExpr`` - this pre-PEP8 argument is retained for compatibility + but will be removed in a future release + + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. + + Use the ``ignore_expr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quoted_string or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. 
The default is + :class:`quoted_string`, but if no expressions are to be ignored, then + pass ``None`` for this argument. + + Example:: + + data_type = one_of("void int short long char float double") + decl_data_type = Combine(data_type + Opt(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR, RPAR = map(Suppress, "()") + + code_body = nested_expr('{', '}', ignore_expr=(quoted_string | c_style_comment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Opt(DelimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(c_style_comment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.search_string(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + + prints:: + + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if ignoreExpr != ignore_expr: + ignoreExpr = ignore_expr if ignoreExpr == quoted_string() else ignoreExpr + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener, str_type) and isinstance(closer, str_type): + opener = typing.cast(str, opener) + closer = typing.cast(str, closer) + if len(opener) == 1 and len(closer) == 1: + if ignoreExpr is not None: + content = Combine( + OneOrMore( + ~ignoreExpr + + CharsNotIn( + opener + closer + ParserElement.DEFAULT_WHITE_CHARS, + exact=1, + ) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + content = empty.copy() + CharsNotIn( + opener + closer + ParserElement.DEFAULT_WHITE_CHARS + ).set_parse_action(lambda t: t[0].strip()) + else: + if ignoreExpr is not None: + content = Combine( + OneOrMore( + ~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + content = Combine( + OneOrMore( + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).set_parse_action(lambda t: t[0].strip()) + else: + raise ValueError( + "opening and closing arguments must be strings if no content expression is given" + ) + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( + Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer) + ) + else: + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.set_name("nested %s%s expression" % (opener, closer)) + return ret + + +def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr, str_type): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dbl_quoted_string.copy().set_parse_action(remove_quotes) + openTag = ( + suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Opt("/", default=[False])("empty").set_parse_action( + lambda s, l, t: t[0] == "/" + ) + + suppress_GT + ) + else: + tagAttrValue = quoted_string.copy().set_parse_action(remove_quotes) | Word( + printables, exclude_chars=">" + ) + openTag = ( + suppress_LT + + tagStr("tag") 
+            + Dict(
+                ZeroOrMore(
+                    Group(
+                        tagAttrName.set_parse_action(lambda t: t[0].lower())
+                        + Opt(Suppress("=") + tagAttrValue)
+                    )
+                )
+            )
+            + Opt("/", default=[False])("empty").set_parse_action(
+                lambda s, l, t: t[0] == "/"
+            )
+            + suppress_GT
+        )
+    closeTag = Combine(Literal("</") + tagStr + ">", adjacent=False)
+
+    openTag.set_name("<%s>" % resname)
+    # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels
+    openTag.add_parse_action(
+        lambda t: t.__setitem__(
+            "start" + "".join(resname.replace(":", " ").title().split()), t.copy()
+        )
+    )
+    closeTag = closeTag(
+        "end" + "".join(resname.replace(":", " ").title().split())
+    ).set_name("</%s>" % resname)
+    openTag.tag = resname
+    closeTag.tag = resname
+    openTag.tag_body = SkipTo(closeTag())
+    return openTag, closeTag
+
+
+def make_html_tags(
+    tag_str: Union[str, ParserElement]
+) -> Tuple[ParserElement, ParserElement]:
+    """Helper to construct opening and closing tag expressions for HTML,
+    given a tag name. Matches tags in either upper or lower case,
+    attributes with namespaces and with quoted or unquoted values.
+
+    Example::
+
+        text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
+        # make_html_tags returns pyparsing expressions for the opening and
+        # closing tags as a 2-tuple
+        a, a_end = make_html_tags("A")
+        link_expr = a + SkipTo(a_end)("link_text") + a_end
+
+        for link in link_expr.search_string(text):
+            # attributes in the <A> tag (like "href" shown here) are
+            # also accessible as named results
+            print(link.link_text, '->', link.href)
+
+    prints::
+
+        pyparsing -> https://github.com/pyparsing/pyparsing/wiki
+    """
+    return _makeTags(tag_str, False)
+
+
+def make_xml_tags(
+    tag_str: Union[str, ParserElement]
+) -> Tuple[ParserElement, ParserElement]:
+    """Helper to construct opening and closing tag expressions for XML,
+    given a tag name. Matches tags only in the given upper/lower case.
+
+    Example: similar to :class:`make_html_tags`
+    """
+    return _makeTags(tag_str, True)
+
+
+any_open_tag: ParserElement
+any_close_tag: ParserElement
+any_open_tag, any_close_tag = make_html_tags(
+    Word(alphas, alphanums + "_:").set_name("any tag")
+)
+
+_htmlEntityMap = {k.rstrip(";"): v for k, v in html.entities.html5.items()}
+common_html_entity = Regex("&(?P<entity>" + "|".join(_htmlEntityMap) + ");").set_name(
+    "common HTML entity"
+)
+
+
+def replace_html_entity(s, l, t):
+    """Helper parser action to replace common HTML entities with their special characters"""
+    return _htmlEntityMap.get(t.entity)
+
+
+class OpAssoc(Enum):
+    """Enumeration of operator associativity
+    - used in constructing InfixNotationOperatorSpec for :class:`infix_notation`"""
+
+    LEFT = 1
+    RIGHT = 2
+
+
+InfixNotationOperatorArgType = Union[
+    ParserElement, str, Tuple[Union[ParserElement, str], Union[ParserElement, str]]
+]
+InfixNotationOperatorSpec = Union[
+    Tuple[
+        InfixNotationOperatorArgType,
+        int,
+        OpAssoc,
+        typing.Optional[ParseAction],
+    ],
+    Tuple[
+        InfixNotationOperatorArgType,
+        int,
+        OpAssoc,
+    ],
+]
+
+
+def infix_notation(
+    base_expr: ParserElement,
+    op_list: List[InfixNotationOperatorSpec],
+    lpar: Union[str, ParserElement] = Suppress("("),
+    rpar: Union[str, ParserElement] = Suppress(")"),
+) -> ParserElement:
+    """Helper method for constructing grammars of expressions made up of
+    operators working in a precedence hierarchy. Operators may be unary
+    or binary, left- or right-associative. Parse actions can also be
+    attached to operator expressions. The generated parser will also
+    recognize the use of parentheses to override operator precedences
+    (see example below).
+
+    Note: if you define a deep operator list, you may see performance
+    issues when using infix_notation. See
+    :class:`ParserElement.enable_packrat` for a mechanism to potentially
+    improve your parser performance.
+
+    Parameters:
+
+    - ``base_expr`` - expression representing the most basic operand to
+      be used in the expression
+    - ``op_list`` - list of tuples, one for each operator precedence level
+      in the expression grammar; each tuple is of the form ``(op_expr,
+      num_operands, right_left_assoc, (optional)parse_action)``, where:
+
+      - ``op_expr`` is the pyparsing expression for the operator; may also
+        be a string, which will be converted to a Literal; if ``num_operands``
+        is 3, ``op_expr`` is a tuple of two expressions, for the two
+        operators separating the 3 terms
+      - ``num_operands`` is the number of terms for this operator (must be 1,
+        2, or 3)
+      - ``right_left_assoc`` is the indicator whether the operator is right
+        or left associative, using the pyparsing-defined constants
+        ``OpAssoc.RIGHT`` and ``OpAssoc.LEFT``.
+      - ``parse_action`` is the parse action to be associated with
+        expressions matching this operator expression (the parse action
+        tuple member may be omitted); if the parse action is passed
+        a tuple or list of functions, this is equivalent to calling
+        ``set_parse_action(*fn)``
+        (:class:`ParserElement.set_parse_action`)
+
+    - ``lpar`` - expression for matching left-parentheses; if passed as a
+      str, then will be parsed as ``Suppress(lpar)``. If lpar is passed as
+      an expression (such as ``Literal('(')``), then it will be kept in
+      the parsed results, and grouped with them. (default= ``Suppress('(')``)
+    - ``rpar`` - expression for matching right-parentheses; if passed as a
+      str, then will be parsed as ``Suppress(rpar)``. If rpar is passed as
+      an expression (such as ``Literal(')')``), then it will be kept in
+      the parsed results, and grouped with them. (default= ``Suppress(')')``)
+
+    Example::
+
+        # simple example of four-function arithmetic with ints and
+        # variable names
+        integer = pyparsing_common.signed_integer
+        varname = pyparsing_common.identifier
+
+        arith_expr = infix_notation(integer | varname,
+            [
+            ('-', 1, OpAssoc.RIGHT),
+            (one_of('* /'), 2, OpAssoc.LEFT),
+            (one_of('+ -'), 2, OpAssoc.LEFT),
+            ])
+
+        arith_expr.run_tests('''
+            5+3*6
+            (5+3)*6
+            (5+x)*y
+            -2--11
+            ''', full_dump=False)
+
+    prints::
+
+        5+3*6
+        [[5, '+', [3, '*', 6]]]
+
+        (5+3)*6
+        [[[5, '+', 3], '*', 6]]
+
+        (5+x)*y
+        [[[5, '+', 'x'], '*', 'y']]
+
+        -2--11
+        [[['-', 2], '-', ['-', 11]]]
+    """
+
+    # captive version of FollowedBy that does not do parse actions or capture results names
+    class _FB(FollowedBy):
+        def parseImpl(self, instring, loc, doActions=True):
+            self.expr.try_parse(instring, loc)
+            return loc, []
+
+    _FB.__name__ = "FollowedBy>"
+
+    ret = Forward()
+    if isinstance(lpar, str):
+        lpar = Suppress(lpar)
+    if isinstance(rpar, str):
+        rpar = Suppress(rpar)
+
+    # if lpar and rpar are not suppressed, wrap in group
+    if not (isinstance(lpar, Suppress) and isinstance(rpar, Suppress)):
+        lastExpr = base_expr | Group(lpar + ret + rpar)
+    else:
+        lastExpr = base_expr | (lpar + ret + rpar)
+
+    arity: int
+    rightLeftAssoc: opAssoc
+    pa: typing.Optional[ParseAction]
+    opExpr1: ParserElement
+    opExpr2: ParserElement
+    for i, operDef in enumerate(op_list):
+        opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4]  # type: ignore[assignment]
+        if isinstance(opExpr, str_type):
+            opExpr = ParserElement._literalStringClass(opExpr)
+        opExpr = typing.cast(ParserElement, opExpr)
+        if arity == 3:
+            if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2:
+                raise ValueError(
+                    "if numterms=3, opExpr must be a tuple or list of two expressions"
+                )
+            opExpr1, opExpr2 = opExpr
+            term_name = f"{opExpr1}{opExpr2} term"
+        else:
+            term_name = f"{opExpr} term"
+
+        if not 1 <= arity <= 3:
+            raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+
+        if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT):
+            raise ValueError("operator must indicate right or left associativity")
+
+        thisExpr: ParserElement = Forward().set_name(term_name)
+        thisExpr = typing.cast(Forward, thisExpr)
+        if rightLeftAssoc is OpAssoc.LEFT:
+            if arity == 1:
+                matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + opExpr[1, ...])
+            elif arity == 2:
+                if opExpr is not None:
+                    matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(
+                        lastExpr + (opExpr + lastExpr)[1, ...]
+ ) + else: + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr[2, ...]) + elif arity == 3: + matchExpr = _FB( + lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr + ) + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr)) + elif rightLeftAssoc is OpAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Opt): + opExpr = Opt(opExpr) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) + elif arity == 2: + if opExpr is not None: + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( + lastExpr + (opExpr + thisExpr)[1, ...] + ) + else: + matchExpr = _FB(lastExpr + thisExpr) + Group( + lastExpr + thisExpr[1, ...] + ) + elif arity == 3: + matchExpr = _FB( + lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr + ) + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.set_parse_action(*pa) + else: + matchExpr.set_parse_action(pa) + thisExpr <<= (matchExpr | lastExpr).setName(term_name) + lastExpr = thisExpr + ret <<= lastExpr + return ret + + +def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]): + """ + (DEPRECATED - use :class:`IndentedBlock` class instead) + Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. + + Parameters: + + - ``blockStatementExpr`` - expression defining syntax of statement that + is repeated within the indented block + - ``indentStack`` - list created by caller to manage indentation stack + (multiple ``statementWithIndentedBlock`` expressions within a single + grammar should share a common ``indentStack``) + - ``indent`` - boolean indicating whether block must be indented beyond + the current level; set to ``False`` for block of left-most statements + (default= ``True``) + + A valid block must contain at least one ``blockStatement``. + + (Note that indentedBlock uses internal parse actions which make it + incompatible with packrat parsing.) + + Example:: + + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group("(" + Opt(delimitedList(identifier)) + ")") + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group(funcDecl + func_body) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Opt(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << (funcDef | assignment | identifier) + + module_body = stmt[1, ...] 
+
+        parseTree = module_body.parseString(data)
+        parseTree.pprint()
+
+    prints::
+
+        [['def',
+          'A',
+          ['(', 'z', ')'],
+          ':',
+          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
+         'B',
+         ['def',
+          'BB',
+          ['(', 'a', 'b', 'c', ')'],
+          ':',
+          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
+         'C',
+         'D',
+         ['def',
+          'spam',
+          ['(', 'x', 'y', ')'],
+          ':',
+          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
+    """
+    backup_stacks.append(indentStack[:])
+
+    def reset_stack():
+        indentStack[:] = backup_stacks[-1]
+
+    def checkPeerIndent(s, l, t):
+        if l >= len(s):
+            return
+        curCol = col(l, s)
+        if curCol != indentStack[-1]:
+            if curCol > indentStack[-1]:
+                raise ParseException(s, l, "illegal nesting")
+            raise ParseException(s, l, "not a peer entry")
+
+    def checkSubIndent(s, l, t):
+        curCol = col(l, s)
+        if curCol > indentStack[-1]:
+            indentStack.append(curCol)
+        else:
+            raise ParseException(s, l, "not a subentry")
+
+    def checkUnindent(s, l, t):
+        if l >= len(s):
+            return
+        curCol = col(l, s)
+        if not (indentStack and curCol in indentStack):
+            raise ParseException(s, l, "not an unindent")
+        if curCol < indentStack[-1]:
+            indentStack.pop()
+
+    NL = OneOrMore(LineEnd().set_whitespace_chars("\t ").suppress())
+    INDENT = (Empty() + Empty().set_parse_action(checkSubIndent)).set_name("INDENT")
+    PEER = Empty().set_parse_action(checkPeerIndent).set_name("")
+    UNDENT = Empty().set_parse_action(checkUnindent).set_name("UNINDENT")
+    if indent:
+        smExpr = Group(
+            Opt(NL)
+            + INDENT
+            + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
+            + UNDENT
+        )
+    else:
+        smExpr = Group(
+            Opt(NL)
+            + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
+            + Opt(UNDENT)
+        )
+
+    # add a parse action to remove backup_stack from list of backups
+    smExpr.add_parse_action(
+        lambda: backup_stacks.pop(-1) and None if backup_stacks else None
+    )
+    smExpr.set_fail_action(lambda a, b, c, d: reset_stack())
+    blockStatementExpr.ignore(_bslash + LineEnd())
+    return smExpr.set_name("indented block")
+
+
+# it's easy to get these comment structures wrong - they're very common, so may as well make them available
+c_style_comment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/").set_name(
+    "C style comment"
+)
+"Comment of the form ``/* ... */``"
+
+html_comment = Regex(r"<!--[\s\S]*?-->").set_name("HTML comment")
+"Comment of the form ``<!-- ... -->``"
+
+rest_of_line = Regex(r".*").leave_whitespace().set_name("rest of line")
+dbl_slash_comment = Regex(r"//(?:\\\n|[^\n])*").set_name("// comment")
+"Comment of the form ``// ... (to end of line)``"
+
+cpp_style_comment = Combine(
+    Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/" | dbl_slash_comment
+).set_name("C++ style comment")
+"Comment of either form :class:`c_style_comment` or :class:`dbl_slash_comment`"
+
+java_style_comment = cpp_style_comment
+"Same as :class:`cpp_style_comment`"
+
+python_style_comment = Regex(r"#.*").set_name("Python style comment")
+"Comment of the form ``# ...
(to end of line)``" + + +# build list of built-in expressions, for future reference if a global default value +# gets updated +_builtin_exprs: List[ParserElement] = [ + v for v in vars().values() if isinstance(v, ParserElement) +] + + +# compatibility function, superseded by DelimitedList class +def delimited_list( + expr: Union[str, ParserElement], + delim: Union[str, ParserElement] = ",", + combine: bool = False, + min: typing.Optional[int] = None, + max: typing.Optional[int] = None, + *, + allow_trailing_delim: bool = False, +) -> ParserElement: + """(DEPRECATED - use :class:`DelimitedList` class)""" + return DelimitedList( + expr, delim, combine, min, max, allow_trailing_delim=allow_trailing_delim + ) + + +# pre-PEP8 compatible names +# fmt: off +opAssoc = OpAssoc +anyOpenTag = any_open_tag +anyCloseTag = any_close_tag +commonHTMLEntity = common_html_entity +cStyleComment = c_style_comment +htmlComment = html_comment +restOfLine = rest_of_line +dblSlashComment = dbl_slash_comment +cppStyleComment = cpp_style_comment +javaStyleComment = java_style_comment +pythonStyleComment = python_style_comment + +@replaced_by_pep8(DelimitedList) +def delimitedList(): ... + +@replaced_by_pep8(DelimitedList) +def delimited_list(): ... + +@replaced_by_pep8(counted_array) +def countedArray(): ... + +@replaced_by_pep8(match_previous_literal) +def matchPreviousLiteral(): ... + +@replaced_by_pep8(match_previous_expr) +def matchPreviousExpr(): ... + +@replaced_by_pep8(one_of) +def oneOf(): ... + +@replaced_by_pep8(dict_of) +def dictOf(): ... + +@replaced_by_pep8(original_text_for) +def originalTextFor(): ... + +@replaced_by_pep8(nested_expr) +def nestedExpr(): ... + +@replaced_by_pep8(make_html_tags) +def makeHTMLTags(): ... + +@replaced_by_pep8(make_xml_tags) +def makeXMLTags(): ... + +@replaced_by_pep8(replace_html_entity) +def replaceHTMLEntity(): ... + +@replaced_by_pep8(infix_notation) +def infixNotation(): ... +# fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py new file mode 100644 index 0000000..0313049 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/results.py @@ -0,0 +1,796 @@ +# results.py +from collections.abc import ( + MutableMapping, + Mapping, + MutableSequence, + Iterator, + Sequence, + Container, +) +import pprint +from typing import Tuple, Any, Dict, Set, List + +str_type: Tuple[type, ...] = (str, bytes) +_generator_type = type((_ for _ in ())) + + +class _ParseResultsWithOffset: + tup: Tuple["ParseResults", int] + __slots__ = ["tup"] + + def __init__(self, p1: "ParseResults", p2: int): + self.tup: Tuple[ParseResults, int] = (p1, p2) + + def __getitem__(self, i): + return self.tup[i] + + def __getstate__(self): + return self.tup + + def __setstate__(self, *args): + self.tup = args[0] + + +class ParseResults: + """Structured parse results, to provide multiple means of access to + the parsed data: + + - as a list (``len(results)``) + - by list index (``results[0], results[1]``, etc.) 
+    - by attribute (``results.<results_name>`` - see :class:`ParserElement.set_results_name`)
+
+    Example::
+
+        integer = Word(nums)
+        date_str = (integer.set_results_name("year") + '/'
+                    + integer.set_results_name("month") + '/'
+                    + integer.set_results_name("day"))
+        # equivalent form:
+        # date_str = (integer("year") + '/'
+        #             + integer("month") + '/'
+        #             + integer("day"))
+
+        # parse_string returns a ParseResults object
+        result = date_str.parse_string("1999/12/31")
+
+        def test(s, fn=repr):
+            print(f"{s} -> {fn(eval(s))}")
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+
+    prints::
+
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: '31'
+        - month: '12'
+        - year: '1999'
+    """
+
+    _null_values: Tuple[Any, ...] = (None, [], ())
+
+    _name: str
+    _parent: "ParseResults"
+    _all_names: Set[str]
+    _modal: bool
+    _toklist: List[Any]
+    _tokdict: Dict[str, Any]
+
+    __slots__ = (
+        "_name",
+        "_parent",
+        "_all_names",
+        "_modal",
+        "_toklist",
+        "_tokdict",
+    )
+
+    class List(list):
+        """
+        Simple wrapper class to distinguish parsed list results that should be preserved
+        as actual Python lists, instead of being converted to :class:`ParseResults`::
+
+            LBRACK, RBRACK = map(pp.Suppress, "[]")
+            element = pp.Forward()
+            item = ppc.integer
+            element_list = LBRACK + pp.DelimitedList(element) + RBRACK
+
+            # add parse actions to convert from ParseResults to actual Python collection types
+            def as_python_list(t):
+                return pp.ParseResults.List(t.as_list())
+            element_list.add_parse_action(as_python_list)
+
+            element <<= item | element_list
+
+            element.run_tests('''
+                100
+                [2,3,4]
+                [[2, 1],3,4]
+                [(2, 1),3,4]
+                (2,3,4)
+                ''', post_parse=lambda s, r: (r[0], type(r[0])))
+
+        prints::
+
+            100
+            (100, <class 'int'>)
+
+            [2,3,4]
+            ([2, 3, 4], <class 'list'>)
+
+            [[2, 1],3,4]
+            ([[2, 1], 3, 4], <class 'list'>)
+
+        (Used internally by :class:`Group` when `aslist=True`.)
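+
+        A minimal usage sketch (illustrative only, with assumed names; the
+        documented pattern is shown above)::
+
+            import pyparsing as pp
+
+            nums = pp.Word(pp.nums)[1, ...]
+            # returning a ParseResults.List from a parse action keeps the
+            # tokens as one true Python list in the results
+            nums.add_parse_action(lambda t: pp.ParseResults.List([int(x) for x in t]))
+            print(nums.parse_string("1 2 3")[0])   # -> [1, 2, 3] (a plain list)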
+ """ + + def __new__(cls, contained=None): + if contained is None: + contained = [] + + if not isinstance(contained, list): + raise TypeError( + f"{cls.__name__} may only be constructed with a list, not {type(contained).__name__}" + ) + + return list.__new__(cls) + + def __new__(cls, toklist=None, name=None, **kwargs): + if isinstance(toklist, ParseResults): + return toklist + self = object.__new__(cls) + self._name = None + self._parent = None + self._all_names = set() + + if toklist is None: + self._toklist = [] + elif isinstance(toklist, (list, _generator_type)): + self._toklist = ( + [toklist[:]] + if isinstance(toklist, ParseResults.List) + else list(toklist) + ) + else: + self._toklist = [toklist] + self._tokdict = dict() + return self + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( + self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance + ): + self._tokdict: Dict[str, _ParseResultsWithOffset] + self._modal = modal + if name is not None and name != "": + if isinstance(name, int): + name = str(name) + if not modal: + self._all_names = {name} + self._name = name + if toklist not in self._null_values: + if isinstance(toklist, (str_type, type)): + toklist = [toklist] + if asList: + if isinstance(toklist, ParseResults): + self[name] = _ParseResultsWithOffset( + ParseResults(toklist._toklist), 0 + ) + else: + self[name] = _ParseResultsWithOffset( + ParseResults(toklist[0]), 0 + ) + self[name]._name = name + else: + try: + self[name] = toklist[0] + except (KeyError, TypeError, IndexError): + if toklist is not self: + self[name] = toklist + else: + self._name = name + + def __getitem__(self, i): + if isinstance(i, (int, slice)): + return self._toklist[i] + else: + if i not in self._all_names: + return self._tokdict[i][-1][0] + else: + return ParseResults([v[0] for v in self._tokdict[i]]) + + def __setitem__(self, k, v, isinstance=isinstance): + if isinstance(v, _ParseResultsWithOffset): + self._tokdict[k] = self._tokdict.get(k, list()) + [v] + sub = v[0] + elif isinstance(k, (int, slice)): + self._toklist[k] = v + sub = v + else: + self._tokdict[k] = self._tokdict.get(k, list()) + [ + _ParseResultsWithOffset(v, 0) + ] + sub = v + if isinstance(sub, ParseResults): + sub._parent = self + + def __delitem__(self, i): + if isinstance(i, (int, slice)): + mylen = len(self._toklist) + del self._toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i + 1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name, occurrences in self._tokdict.items(): + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset( + value, position - (position > j) + ) + else: + del self._tokdict[i] + + def __contains__(self, k) -> bool: + return k in self._tokdict + + def __len__(self) -> int: + return len(self._toklist) + + def __bool__(self) -> bool: + return not not (self._toklist or self._tokdict) + + def __iter__(self) -> Iterator: + return iter(self._toklist) + + def __reversed__(self) -> Iterator: + return iter(self._toklist[::-1]) + + def keys(self): + return iter(self._tokdict) + + def values(self): + return (self[k] for k in self.keys()) + + def items(self): + return ((k, self[k]) for k in self.keys()) + + def haskeys(self) -> bool: + """ + Since ``keys()`` returns an iterator, this method is helpful in bypassing + code 
that looks for the existence of any defined results names.""" + return not not self._tokdict + + def pop(self, *args, **kwargs): + """ + Removes and returns item at specified index (default= ``last``). + Supports both ``list`` and ``dict`` semantics for ``pop()``. If + passed no argument or an integer argument, it will use ``list`` + semantics and pop tokens from the list of parsed tokens. If passed + a non-integer argument (most likely a string), it will use ``dict`` + semantics and pop the corresponding value from any defined results + names. A second default return value argument is supported, just as in + ``dict.pop()``. + + Example:: + + numlist = Word(nums)[...] + print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] + + def remove_first(tokens): + tokens.pop(0) + numlist.add_parse_action(remove_first) + print(numlist.parse_string("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + Word(nums)[1, ...] + print(patt.parse_string("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.add_parse_action(remove_LABEL) + print(patt.parse_string("AAB 123 321").dump()) + + prints:: + + ['AAB', '123', '321'] + - LABEL: 'AAB' + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k, v in kwargs.items(): + if k == "default": + args = (args[0], v) + else: + raise TypeError(f"pop() got an unexpected keyword argument {k!r}") + if isinstance(args[0], int) or len(args) == 1 or args[0] in self: + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, default_value=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given ``default_value`` or ``None`` if no + ``default_value`` is specified. + + Similar to ``dict.get()``. + + Example:: + + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parse_string("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return default_value + + def insert(self, index, ins_string): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to ``list.insert()``. + + Example:: + + numlist = Word(nums)[...] + print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + numlist.add_parse_action(insert_locn) + print(numlist.parse_string("0 123 321")) # -> [0, '0', '123', '321'] + """ + self._toklist.insert(index, ins_string) + # fixup indices in token dictionary + for name, occurrences in self._tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset( + value, position + (position > index) + ) + + def append(self, item): + """ + Add single element to end of ``ParseResults`` list of elements. + + Example:: + + numlist = Word(nums)[...] 
+            print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            numlist.add_parse_action(append_sum)
+            print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self._toklist.append(item)
+
+    def extend(self, itemseq):
+        """
+        Add sequence of elements to end of ``ParseResults`` list of elements.
+
+        Example::
+
+            patt = Word(alphas)[1, ...]
+
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            patt.add_parse_action(make_palindrome)
+            print(patt.parse_string("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self.__iadd__(itemseq)
+        else:
+            self._toklist.extend(itemseq)
+
+    def clear(self):
+        """
+        Clear all elements and results names.
+        """
+        del self._toklist[:]
+        self._tokdict.clear()
+
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            if name.startswith("__"):
+                raise AttributeError(name)
+            return ""
+
+    def __add__(self, other: "ParseResults") -> "ParseResults":
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__(self, other: "ParseResults") -> "ParseResults":
+        if not other:
+            return self
+
+        if other._tokdict:
+            offset = len(self._toklist)
+            addoffset = lambda a: offset if a < 0 else a + offset
+            otheritems = other._tokdict.items()
+            otherdictitems = [
+                (k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
+                for k, vlist in otheritems
+                for v in vlist
+            ]
+            for k, v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0], ParseResults):
+                    v[0]._parent = self
+
+        self._toklist += other._toklist
+        self._all_names |= other._all_names
+        return self
+
+    def __radd__(self, other) -> "ParseResults":
+        if isinstance(other, int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+
+    def __repr__(self) -> str:
+        return f"{type(self).__name__}({self._toklist!r}, {self.as_dict()})"
+
+    def __str__(self) -> str:
+        return (
+            "["
+            + ", ".join(
+                [
+                    str(i) if isinstance(i, ParseResults) else repr(i)
+                    for i in self._toklist
+                ]
+            )
+            + "]"
+        )
+
+    def _asStringList(self, sep=""):
+        out = []
+        for item in self._toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance(item, ParseResults):
+                out += item._asStringList()
+            else:
+                out.append(str(item))
+        return out
+
+    def as_list(self) -> list:
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+
+            patt = Word(alphas)[1, ...]
+            result = patt.parse_string("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+            # Use as_list() to create an actual list
+            result_list = result.as_list()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [
+            res.as_list() if isinstance(res, ParseResults) else res
+            for res in self._toklist
+        ]
+
+    def as_dict(self) -> dict:
+        """
+        Returns the named parse results as a nested dictionary.
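+
+        Nested ``ParseResults`` that have results names of their own are rendered
+        as nested dicts, while unnamed nested results are rendered as lists.
+        A brief illustrative sketch of that behavior (not one of the library's
+        documented examples)::
+
+            w = Word(alphas)
+            pair = Group(w("key") + w("value"))("pair")
+            print(pair.parse_string("a b").as_dict())
+            # -> {'pair': {'key': 'a', 'value': 'b'}}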
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parse_string('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+            result_dict = result.as_dict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.as_dict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+
+        def to_item(obj):
+            if isinstance(obj, ParseResults):
+                return obj.as_dict() if obj.haskeys() else [to_item(v) for v in obj]
+            else:
+                return obj
+
+        return dict((k, to_item(v)) for k, v in self.items())
+
+    def copy(self) -> "ParseResults":
+        """
+        Returns a new shallow copy of a :class:`ParseResults` object. `ParseResults`
+        items contained within the source are shared with the copy. Use
+        :class:`ParseResults.deepcopy()` to create a copy with its own separate
+        content values.
+        """
+        ret = ParseResults(self._toklist)
+        ret._tokdict = self._tokdict.copy()
+        ret._parent = self._parent
+        ret._all_names |= self._all_names
+        ret._name = self._name
+        return ret
+
+    def deepcopy(self) -> "ParseResults":
+        """
+        Returns a new deep copy of a :class:`ParseResults` object.
+        """
+        ret = self.copy()
+        # replace values in the copy with copies if they are of known mutable types
+        for i, obj in enumerate(self._toklist):
+            if isinstance(obj, ParseResults):
+                ret._toklist[i] = obj.deepcopy()
+            elif isinstance(obj, (str, bytes)):
+                pass
+            elif isinstance(obj, MutableMapping):
+                ret._toklist[i] = dest = type(obj)()
+                for k, v in obj.items():
+                    dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v
+            elif isinstance(obj, Container):
+                ret._toklist[i] = type(obj)(
+                    v.deepcopy() if isinstance(v, ParseResults) else v for v in obj
+                )
+        return ret
+
+    def get_name(self):
+        r"""
+        Returns the results name for this token expression. Useful when several
+        different expressions might match at a particular location.
+
+        Example::
+
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number")
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = user_data[1, ...]
+
+            result = user_info.parse_string("22 111-22-3333 #221B")
+            for item in result:
+                print(item.get_name(), ':', item[0])
+
+        prints::
+
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self._name:
+            return self._name
+        elif self._parent:
+            par: "ParseResults" = self._parent
+            parent_tokdict_items = par._tokdict.items()
+            return next(
+                (
+                    k
+                    for k, vlist in parent_tokdict_items
+                    for v, loc in vlist
+                    if v is self
+                ),
+                None,
+            )
+        elif (
+            len(self) == 1
+            and len(self._tokdict) == 1
+            and next(iter(self._tokdict.values()))[0][1] in (0, -1)
+        ):
+            return next(iter(self._tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent="", full=True, include_list=True, _depth=0) -> str:
+        """
+        Diagnostic method for listing out the contents of
+        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
+        that this string can be embedded in a nested display of other data.
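+
+        The ``full`` and ``include_list`` arguments select whether named results
+        and the raw token list are shown. An illustrative sketch of the
+        ``indent`` argument (assumed input, output abbreviated)::
+
+            result = (Word(nums)("num")).parse_string("42")
+            print(result.dump(indent="  "))   # every output line is prefixed with "  "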
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parse_string('1999/12/31')
+            print(result.dump())
+
+        prints::
+
+            ['1999', '/', '12', '/', '31']
+            - day: '31'
+            - month: '12'
+            - year: '1999'
+        """
+        out = []
+        NL = "\n"
+        out.append(indent + str(self.as_list()) if include_list else "")
+
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k, v in self.items())
+                for k, v in items:
+                    if out:
+                        out.append(NL)
+                    out.append(f"{indent}{(' ' * _depth)}- {k}: ")
+                    if isinstance(v, ParseResults):
+                        if v:
+                            out.append(
+                                v.dump(
+                                    indent=indent,
+                                    full=full,
+                                    include_list=include_list,
+                                    _depth=_depth + 1,
+                                )
+                            )
+                        else:
+                            out.append(str(v))
+                    else:
+                        out.append(repr(v))
+            if any(isinstance(vv, ParseResults) for vv in self):
+                v = self
+                for i, vv in enumerate(v):
+                    if isinstance(vv, ParseResults):
+                        out.append(
+                            "\n{}{}[{}]:\n{}{}{}".format(
+                                indent,
+                                (" " * (_depth)),
+                                i,
+                                indent,
+                                (" " * (_depth + 1)),
+                                vv.dump(
+                                    indent=indent,
+                                    full=full,
+                                    include_list=include_list,
+                                    _depth=_depth + 1,
+                                ),
+                            )
+                        )
+                    else:
+                        out.append(
+                            "\n%s%s[%d]:\n%s%s%s"
+                            % (
+                                indent,
+                                (" " * (_depth)),
+                                i,
+                                indent,
+                                (" " * (_depth + 1)),
+                                str(vv),
+                            )
+                        )
+
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the
+        `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
+        Accepts additional positional or keyword args as defined for
+        `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
+
+        Example::
+
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(DelimitedList(term)))
+            result = func.parse_string("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+
+        prints::
+
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.as_list(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return (
+            self._toklist,
+            (
+                self._tokdict.copy(),
+                None,
+                self._all_names,
+                self._name,
+            ),
+        )
+
+    def __setstate__(self, state):
+        self._toklist, (self._tokdict, par, inAccumNames, self._name) = state
+        self._all_names = set(inAccumNames)
+        self._parent = None
+
+    def __getnewargs__(self):
+        return self._toklist, self._name
+
+    def __dir__(self):
+        return dir(type(self)) + list(self.keys())
+
+    @classmethod
+    def from_dict(cls, other, name=None) -> "ParseResults":
+        """
+        Helper classmethod to construct a ``ParseResults`` from a ``dict``, preserving the
+        name-value relations as results names. If an optional ``name`` argument is
+        given, a nested ``ParseResults`` will be returned.
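+
+        An illustrative sketch (assumed data, not from the library's own docs)::
+
+            rec = ParseResults.from_dict({"name": "Ada", "langs": ["py", "c"]})
+            print(rec.name)              # -> 'Ada'
+            print(rec.langs.as_list())   # -> ['py', 'c']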
+ """ + + def is_iterable(obj): + try: + iter(obj) + except Exception: + return False + # str's are iterable, but in pyparsing, we don't want to iterate over them + else: + return not isinstance(obj, str_type) + + ret = cls([]) + for k, v in other.items(): + if isinstance(v, Mapping): + ret += cls.from_dict(v, name=k) + else: + ret += cls([v], name=k, asList=is_iterable(v)) + if name is not None: + ret = cls([ret], name=name) + return ret + + asList = as_list + """Deprecated - use :class:`as_list`""" + asDict = as_dict + """Deprecated - use :class:`as_dict`""" + getName = get_name + """Deprecated - use :class:`get_name`""" + + +MutableMapping.register(ParseResults) +MutableSequence.register(ParseResults) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py new file mode 100644 index 0000000..6a254c1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/testing.py @@ -0,0 +1,331 @@ +# testing.py + +from contextlib import contextmanager +import typing + +from .core import ( + ParserElement, + ParseException, + Keyword, + __diag__, + __compat__, +) + + +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - bounded recursion parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example:: + + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] 
+ ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + + self._save_context["verbose_stacktrace"] = ParserElement.verbose_stacktrace + + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + if ParserElement._packratEnabled: + self._save_context[ + "packrat_cache_size" + ] = ParserElement.packrat_cache.size + else: + self._save_context["packrat_cache_size"] = None + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context[ + "recursion_enabled" + ] = ParserElement._left_recursion_enabled + + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.set_default_whitespace_chars( + self._save_context["default_whitespace"] + ) + + ParserElement.verbose_stacktrace = self._save_context["verbose_stacktrace"] + + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + + for name, value in self._save_context["__diag__"].items(): + (__diag__.enable if value else __diag__.disable)(name) + + ParserElement._packratEnabled = False + if self._save_context["packrat_enabled"]: + ParserElement.enable_packrat(self._save_context["packrat_cache_size"]) + else: + ParserElement._parse = self._save_context["packrat_parse"] + ParserElement._left_recursion_enabled = self._save_context[ + "recursion_enabled" + ] + + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + return self + + def copy(self): + ret = type(self)() + ret._save_context.update(self._save_context) + return ret + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. + """ + + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a :class:`ParseResults` object with an optional ``expected_list``, + and compare any defined results names with an optional ``expected_dict``. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.as_list(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.as_dict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ``ParseResults.asList()`` is equal to the ``expected_list``. 
+ """ + result = expr.parse_string(test_string, parse_all=True) + if verbose: + print(result.dump()) + else: + print(result.as_list()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ``ParseResults.asDict()`` is equal to the ``expected_dict``. + """ + result = expr.parse_string(test_string, parseAll=True) + if verbose: + print(result.dump()) + else: + print(result.as_list()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ``ParserElement.runTests()``. If a list of + list-dict tuples is given as the ``expected_parse_results`` argument, then these are zipped + with the report tuples returned by ``runTests`` and evaluated using ``assertParseResultsEquals``. + Finally, asserts that the overall ``runTests()`` success value is ``True``. + + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (*rpt, expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? + print(f"no validation for {test_string!r}") + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + @staticmethod + def with_line_numbers( + s: str, + start_line: typing.Optional[int] = None, + end_line: typing.Optional[int] = None, + expand_tabs: bool = True, + eol_mark: str = "|", + mark_spaces: typing.Optional[str] = None, + mark_control: typing.Optional[str] = None, + ) -> str: + """ + Helpful method for debugging a parser - prints a string with line and column numbers. + (Line and column numbers are 1-based.) 
+
+        :param s: str - string to be printed with line and column numbers
+        :param start_line: int - (optional) starting line number in s to print (default=1)
+        :param end_line: int - (optional) ending line number in s to print (default=len(s))
+        :param expand_tabs: bool - (optional) expand tabs to spaces, to match the pyparsing default
+        :param eol_mark: str - (optional) string to mark the end of lines, helps visualize trailing spaces (default="|")
+        :param mark_spaces: str - (optional) special character to display in place of spaces
+        :param mark_control: str - (optional) convert non-printing control characters to a placeholding
+                                   character; valid values:
+
+                                   - "unicode" - replaces control chars with Unicode symbols, such as "␍" and "␊"
+                                   - any single character string - replace control characters with given string
+                                   - None (default) - string is displayed as-is
+
+        :return: str - input string with leading line numbers and column number headers
+        """
+        if expand_tabs:
+            s = s.expandtabs()
+        if mark_control is not None:
+            mark_control = typing.cast(str, mark_control)
+            if mark_control == "unicode":
+                transtable_map = {
+                    c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433))
+                }
+                transtable_map[127] = 0x2421
+                tbl = str.maketrans(transtable_map)
+                eol_mark = ""
+            else:
+                ord_mark_control = ord(mark_control)
+                tbl = str.maketrans(
+                    {c: ord_mark_control for c in list(range(0, 32)) + [127]}
+                )
+            s = s.translate(tbl)
+        if mark_spaces is not None and mark_spaces != " ":
+            if mark_spaces == "unicode":
+                tbl = str.maketrans({9: 0x2409, 32: 0x2423})
+                s = s.translate(tbl)
+            else:
+                s = s.replace(" ", mark_spaces)
+        if start_line is None:
+            start_line = 1
+        if end_line is None:
+            end_line = len(s)
+        end_line = min(end_line, len(s))
+        start_line = min(max(1, start_line), end_line)
+
+        if mark_control != "unicode":
+            s_lines = s.splitlines()[start_line - 1 : end_line]
+        else:
+            s_lines = [line + "␊" for line in s.split("␊")[start_line - 1 : end_line]]
+        if not s_lines:
+            return ""
+
+        lineno_width = len(str(end_line))
+        max_line_len = max(len(line) for line in s_lines)
+        lead = " " * (lineno_width + 1)
+        if max_line_len >= 99:
+            header0 = (
+                lead
+                + "".join(
+                    f"{' ' * 99}{(i + 1) % 100}"
+                    for i in range(max(max_line_len // 100, 1))
+                )
+                + "\n"
+            )
+        else:
+            header0 = ""
+        header1 = (
+            header0
+            + lead
+            + "".join(f"         {(i + 1) % 10}" for i in range(-(-max_line_len // 10)))
+            + "\n"
+        )
+        header2 = lead + "1234567890" * (-(-max_line_len // 10)) + "\n"
+        return (
+            header1
+            + header2
+            + "\n".join(
+                f"{i:{lineno_width}d}:{line}{eol_mark}"
+                for i, line in enumerate(s_lines, start=start_line)
+            )
+            + "\n"
+        )
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py
new file mode 100644
index 0000000..ec0b3a4
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/unicode.py
@@ -0,0 +1,361 @@
+# unicode.py
+
+import sys
+from itertools import filterfalse
+from typing import List, Tuple, Union
+
+
+class _lazyclassproperty:
+    def __init__(self, fn):
+        self.fn = fn
+        self.__doc__ = fn.__doc__
+        self.__name__ = fn.__name__
+
+    def __get__(self, obj, cls):
+        if cls is None:
+            cls = type(obj)
+        if not hasattr(cls, "_intern") or any(
+            cls._intern is getattr(superclass, "_intern", [])
+            for superclass in cls.__mro__[1:]
+        ):
+            cls._intern = {}
+        attrname = self.fn.__name__
+        if attrname not in cls._intern:
+            cls._intern[attrname] = self.fn(cls)
+        return cls._intern[attrname]
+
+
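+# A minimal illustrative sketch of how ``_lazyclassproperty`` behaves (kept as
+# a comment so the vendored module is unchanged at import time): the decorated
+# function runs once per class, and the result is cached in that class's own
+# ``_intern`` dict rather than a superclass's.
+#
+#     class Demo:
+#         @_lazyclassproperty
+#         def expensive(cls):
+#             print("computed once")
+#             return 42
+#
+#     Demo.expensive  # prints "computed once", returns 42
+#     Demo.expensive  # cached; returns 42 without printing
+
+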
+UnicodeRangeList = List[Union[Tuple[int, int], Tuple[int]]] + + +class unicode_set: + """ + A set of Unicode characters, for language-specific strings for + ``alphas``, ``nums``, ``alphanums``, and ``printables``. + A unicode_set is defined by a list of ranges in the Unicode character + set, in a class attribute ``_ranges``. Ranges can be specified using + 2-tuples or a 1-tuple, such as:: + + _ranges = [ + (0x0020, 0x007e), + (0x00a0, 0x00ff), + (0x0100,), + ] + + Ranges are left- and right-inclusive. A 1-tuple of (x,) is treated as (x, x). + + A unicode set can also be defined using multiple inheritance of other unicode sets:: + + class CJK(Chinese, Japanese, Korean): + pass + """ + + _ranges: UnicodeRangeList = [] + + @_lazyclassproperty + def _chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in getattr(cc, "_ranges", ()): + ret.extend(range(rr[0], rr[-1] + 1)) + return [chr(c) for c in sorted(set(ret))] + + @_lazyclassproperty + def printables(cls): + """all non-whitespace characters in this range""" + return "".join(filterfalse(str.isspace, cls._chars_for_ranges)) + + @_lazyclassproperty + def alphas(cls): + """all alphabetic characters in this range""" + return "".join(filter(str.isalpha, cls._chars_for_ranges)) + + @_lazyclassproperty + def nums(cls): + """all numeric digit characters in this range""" + return "".join(filter(str.isdigit, cls._chars_for_ranges)) + + @_lazyclassproperty + def alphanums(cls): + """all alphanumeric characters in this range""" + return cls.alphas + cls.nums + + @_lazyclassproperty + def identchars(cls): + """all characters in this range that are valid identifier characters, plus underscore '_'""" + return "".join( + sorted( + set( + "".join(filter(str.isidentifier, cls._chars_for_ranges)) + + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº" + + "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ" + + "_" + ) + ) + ) + + @_lazyclassproperty + def identbodychars(cls): + """ + all characters in this range that are valid identifier body characters, + plus the digits 0-9, and · (Unicode MIDDLE DOT) + """ + return "".join( + sorted( + set( + cls.identchars + + "0123456789·" + + "".join( + [c for c in cls._chars_for_ranges if ("_" + c).isidentifier()] + ) + ) + ) + ) + + @_lazyclassproperty + def identifier(cls): + """ + a pyparsing Word expression for an identifier using this range's definitions for + identchars and identbodychars + """ + from pip._vendor.pyparsing import Word + + return Word(cls.identchars, cls.identbodychars) + + +class pyparsing_unicode(unicode_set): + """ + A namespace class for defining common language unicode_sets. 
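+
+    An illustrative sketch of typical use (not from the library's own docs)::
+
+        import pyparsing as pp
+        ppu = pp.pyparsing_unicode
+
+        greek_word = pp.Word(ppu.Greek.alphas)
+        print(greek_word.parse_string("αβγ δεζ"))   # -> ['αβγ']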
+ """ + + # fmt: off + + # define ranges in language character sets + _ranges: UnicodeRangeList = [ + (0x0020, sys.maxunicode), + ] + + class BasicMultilingualPlane(unicode_set): + """Unicode set for the Basic Multilingual Plane""" + _ranges: UnicodeRangeList = [ + (0x0020, 0xFFFF), + ] + + class Latin1(unicode_set): + """Unicode set for Latin-1 Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0020, 0x007E), + (0x00A0, 0x00FF), + ] + + class LatinA(unicode_set): + """Unicode set for Latin-A Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0100, 0x017F), + ] + + class LatinB(unicode_set): + """Unicode set for Latin-B Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0180, 0x024F), + ] + + class Greek(unicode_set): + """Unicode set for Greek Unicode Character Ranges""" + _ranges: UnicodeRangeList = [ + (0x0342, 0x0345), + (0x0370, 0x0377), + (0x037A, 0x037F), + (0x0384, 0x038A), + (0x038C,), + (0x038E, 0x03A1), + (0x03A3, 0x03E1), + (0x03F0, 0x03FF), + (0x1D26, 0x1D2A), + (0x1D5E,), + (0x1D60,), + (0x1D66, 0x1D6A), + (0x1F00, 0x1F15), + (0x1F18, 0x1F1D), + (0x1F20, 0x1F45), + (0x1F48, 0x1F4D), + (0x1F50, 0x1F57), + (0x1F59,), + (0x1F5B,), + (0x1F5D,), + (0x1F5F, 0x1F7D), + (0x1F80, 0x1FB4), + (0x1FB6, 0x1FC4), + (0x1FC6, 0x1FD3), + (0x1FD6, 0x1FDB), + (0x1FDD, 0x1FEF), + (0x1FF2, 0x1FF4), + (0x1FF6, 0x1FFE), + (0x2129,), + (0x2719, 0x271A), + (0xAB65,), + (0x10140, 0x1018D), + (0x101A0,), + (0x1D200, 0x1D245), + (0x1F7A1, 0x1F7A7), + ] + + class Cyrillic(unicode_set): + """Unicode set for Cyrillic Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0400, 0x052F), + (0x1C80, 0x1C88), + (0x1D2B,), + (0x1D78,), + (0x2DE0, 0x2DFF), + (0xA640, 0xA672), + (0xA674, 0xA69F), + (0xFE2E, 0xFE2F), + ] + + class Chinese(unicode_set): + """Unicode set for Chinese Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x2E80, 0x2E99), + (0x2E9B, 0x2EF3), + (0x31C0, 0x31E3), + (0x3400, 0x4DB5), + (0x4E00, 0x9FEF), + (0xA700, 0xA707), + (0xF900, 0xFA6D), + (0xFA70, 0xFAD9), + (0x16FE2, 0x16FE3), + (0x1F210, 0x1F212), + (0x1F214, 0x1F23B), + (0x1F240, 0x1F248), + (0x20000, 0x2A6D6), + (0x2A700, 0x2B734), + (0x2B740, 0x2B81D), + (0x2B820, 0x2CEA1), + (0x2CEB0, 0x2EBE0), + (0x2F800, 0x2FA1D), + ] + + class Japanese(unicode_set): + """Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges""" + + class Kanji(unicode_set): + "Unicode set for Kanji Unicode Character Range" + _ranges: UnicodeRangeList = [ + (0x4E00, 0x9FBF), + (0x3000, 0x303F), + ] + + class Hiragana(unicode_set): + """Unicode set for Hiragana Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x3041, 0x3096), + (0x3099, 0x30A0), + (0x30FC,), + (0xFF70,), + (0x1B001,), + (0x1B150, 0x1B152), + (0x1F200,), + ] + + class Katakana(unicode_set): + """Unicode set for Katakana Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x3099, 0x309C), + (0x30A0, 0x30FF), + (0x31F0, 0x31FF), + (0x32D0, 0x32FE), + (0xFF65, 0xFF9F), + (0x1B000,), + (0x1B164, 0x1B167), + (0x1F201, 0x1F202), + (0x1F213,), + ] + + 漢字 = Kanji + カタカナ = Katakana + ひらがな = Hiragana + + _ranges = ( + Kanji._ranges + + Hiragana._ranges + + Katakana._ranges + ) + + class Hangul(unicode_set): + """Unicode set for Hangul (Korean) Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x1100, 0x11FF), + (0x302E, 0x302F), + (0x3131, 0x318E), + (0x3200, 0x321C), + (0x3260, 0x327B), + (0x327E,), + (0xA960, 0xA97C), + (0xAC00, 0xD7A3), + (0xD7B0, 0xD7C6), + (0xD7CB, 
0xD7FB), + (0xFFA0, 0xFFBE), + (0xFFC2, 0xFFC7), + (0xFFCA, 0xFFCF), + (0xFFD2, 0xFFD7), + (0xFFDA, 0xFFDC), + ] + + Korean = Hangul + + class CJK(Chinese, Japanese, Hangul): + """Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range""" + + class Thai(unicode_set): + """Unicode set for Thai Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0E01, 0x0E3A), + (0x0E3F, 0x0E5B) + ] + + class Arabic(unicode_set): + """Unicode set for Arabic Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0600, 0x061B), + (0x061E, 0x06FF), + (0x0700, 0x077F), + ] + + class Hebrew(unicode_set): + """Unicode set for Hebrew Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0591, 0x05C7), + (0x05D0, 0x05EA), + (0x05EF, 0x05F4), + (0xFB1D, 0xFB36), + (0xFB38, 0xFB3C), + (0xFB3E,), + (0xFB40, 0xFB41), + (0xFB43, 0xFB44), + (0xFB46, 0xFB4F), + ] + + class Devanagari(unicode_set): + """Unicode set for Devanagari Unicode Character Range""" + _ranges: UnicodeRangeList = [ + (0x0900, 0x097F), + (0xA8E0, 0xA8FF) + ] + + BMP = BasicMultilingualPlane + + # add language identifiers using language Unicode + العربية = Arabic + 中文 = Chinese + кириллица = Cyrillic + Ελληνικά = Greek + עִברִית = Hebrew + 日本語 = Japanese + 한국어 = Korean + ไทย = Thai + देवनागरी = Devanagari + + # fmt: on diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py new file mode 100644 index 0000000..d8d3f41 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyparsing/util.py @@ -0,0 +1,284 @@ +# util.py +import inspect +import warnings +import types +import collections +import itertools +from functools import lru_cache, wraps +from typing import Callable, List, Union, Iterable, TypeVar, cast + +_bslash = chr(92) +C = TypeVar("C", bound=Callable) + + +class __config_flags: + """Internal class for defining compatibility and debugging flags""" + + _all_names: List[str] = [] + _fixed_names: List[str] = [] + _type_desc = "configuration" + + @classmethod + def _set(cls, dname, value): + if dname in cls._fixed_names: + warnings.warn( + f"{cls.__name__}.{dname} {cls._type_desc} is {str(getattr(cls, dname)).upper()}" + f" and cannot be overridden", + stacklevel=3, + ) + return + if dname in cls._all_names: + setattr(cls, dname, value) + else: + raise ValueError(f"no such {cls._type_desc} {dname!r}") + + enable = classmethod(lambda cls, name: cls._set(name, True)) + disable = classmethod(lambda cls, name: cls._set(name, False)) + + +@lru_cache(maxsize=128) +def col(loc: int, strg: str) -> int: + """ + Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See + :class:`ParserElement.parse_string` for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. + """ + s = strg + return 1 if 0 < loc < len(s) and s[loc - 1] == "\n" else loc - s.rfind("\n", 0, loc) + + +@lru_cache(maxsize=128) +def lineno(loc: int, strg: str) -> int: + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note - the default parsing behavior is to expand tabs in the input string + before starting the parsing process. 
See :class:`ParserElement.parse_string` + for more information on parsing strings containing ```` s, and + suggested methods to maintain a consistent view of the parsed string, the + parse location, and line and column positions within the parsed string. + """ + return strg.count("\n", 0, loc) + 1 + + +@lru_cache(maxsize=128) +def line(loc: int, strg: str) -> str: + """ + Returns the line of text containing loc within a string, counting newlines as line separators. + """ + last_cr = strg.rfind("\n", 0, loc) + next_cr = strg.find("\n", loc) + return strg[last_cr + 1 : next_cr] if next_cr >= 0 else strg[last_cr + 1 :] + + +class _UnboundedCache: + def __init__(self): + cache = {} + cache_get = cache.get + self.not_in_cache = not_in_cache = object() + + def get(_, key): + return cache_get(key, not_in_cache) + + def set_(_, key, value): + cache[key] = value + + def clear(_): + cache.clear() + + self.size = None + self.get = types.MethodType(get, self) + self.set = types.MethodType(set_, self) + self.clear = types.MethodType(clear, self) + + +class _FifoCache: + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + cache = {} + keyring = [object()] * size + cache_get = cache.get + cache_pop = cache.pop + keyiter = itertools.cycle(range(size)) + + def get(_, key): + return cache_get(key, not_in_cache) + + def set_(_, key, value): + cache[key] = value + i = next(keyiter) + cache_pop(keyring[i], None) + keyring[i] = key + + def clear(_): + cache.clear() + keyring[:] = [object()] * size + + self.size = size + self.get = types.MethodType(get, self) + self.set = types.MethodType(set_, self) + self.clear = types.MethodType(clear, self) + + +class LRUMemo: + """ + A memoizing mapping that retains `capacity` deleted items + + The memo tracks retained items by their access order; once `capacity` items + are retained, the least recently used item is discarded. 
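A sketch of that retention contract, assuming LRUMemo as defined in this file is in scope (its implementation follows below): live values sit in _active, deletion parks them in a bounded _memory dict, and the least recently touched parked value is dropped once capacity is exceeded.

memo = LRUMemo(capacity=2)
memo["a"] = 1
memo["b"] = 2
del memo["a"]            # parked in _memory rather than discarded
del memo["b"]            # parked; _memory now retains both items
assert memo["a"] == 1    # recall refreshes "a" in the retention order
memo["c"] = 3
del memo["c"]            # at capacity: least recently used item ("b") is evicted
try:
    memo["b"]
except KeyError:
    print("b was evicted")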
+ """ + + def __init__(self, capacity): + self._capacity = capacity + self._active = {} + self._memory = collections.OrderedDict() + + def __getitem__(self, key): + try: + return self._active[key] + except KeyError: + self._memory.move_to_end(key) + return self._memory[key] + + def __setitem__(self, key, value): + self._memory.pop(key, None) + self._active[key] = value + + def __delitem__(self, key): + try: + value = self._active.pop(key) + except KeyError: + pass + else: + while len(self._memory) >= self._capacity: + self._memory.popitem(last=False) + self._memory[key] = value + + def clear(self): + self._active.clear() + self._memory.clear() + + +class UnboundedMemo(dict): + """ + A memoizing mapping that retains all deleted items + """ + + def __delitem__(self, key): + pass + + +def _escape_regex_range_chars(s: str) -> str: + # escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") + return str(s) + + +def _collapse_string_to_ranges( + s: Union[str, Iterable[str]], re_escape: bool = True +) -> str: + def is_consecutive(c): + c_int = ord(c) + is_consecutive.prev, prev = c_int, is_consecutive.prev + if c_int - prev > 1: + is_consecutive.value = next(is_consecutive.counter) + return is_consecutive.value + + is_consecutive.prev = 0 # type: ignore [attr-defined] + is_consecutive.counter = itertools.count() # type: ignore [attr-defined] + is_consecutive.value = -1 # type: ignore [attr-defined] + + def escape_re_range_char(c): + return "\\" + c if c in r"\^-][" else c + + def no_escape_re_range_char(c): + return c + + if not re_escape: + escape_re_range_char = no_escape_re_range_char + + ret = [] + s = "".join(sorted(set(s))) + if len(s) > 3: + for _, chars in itertools.groupby(s, key=is_consecutive): + first = last = next(chars) + last = collections.deque( + itertools.chain(iter([last]), chars), maxlen=1 + ).pop() + if first == last: + ret.append(escape_re_range_char(first)) + else: + sep = "" if ord(last) == ord(first) + 1 else "-" + ret.append( + f"{escape_re_range_char(first)}{sep}{escape_re_range_char(last)}" + ) + else: + ret = [escape_re_range_char(c) for c in s] + + return "".join(ret) + + +def _flatten(ll: list) -> list: + ret = [] + for i in ll: + if isinstance(i, list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + + +def _make_synonym_function(compat_name: str, fn: C) -> C: + # In a future version, uncomment the code in the internal _inner() functions + # to begin emitting DeprecationWarnings. + + # Unwrap staticmethod/classmethod + fn = getattr(fn, "__func__", fn) + + # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take + # some extra steps to add it if present in decorated function.) 
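Before _make_synonym_function's body continues below, a quick demonstration of _collapse_string_to_ranges above, which pyparsing uses to build compact regex character-class bodies; a sketch assuming the (module-private) helper is in scope:

print(_collapse_string_to_ranges("abcdefxyz012"))
# -> '0-2a-fx-z': input is de-duplicated and sorted, consecutive runs become
#    'first-last' spans, and regex-special characters would be escaped.
print(_collapse_string_to_ranges("abc"))
# -> 'abc': at three characters or fewer, no ranges are built.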
+ if "self" == list(inspect.signature(fn).parameters)[0]: + + @wraps(fn) + def _inner(self, *args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(self, *args, **kwargs) + + else: + + @wraps(fn) + def _inner(*args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(*args, **kwargs) + + _inner.__doc__ = f"""Deprecated - use :class:`{fn.__name__}`""" + _inner.__name__ = compat_name + _inner.__annotations__ = fn.__annotations__ + if isinstance(fn, types.FunctionType): + _inner.__kwdefaults__ = fn.__kwdefaults__ + elif isinstance(fn, type) and hasattr(fn, "__init__"): + _inner.__kwdefaults__ = fn.__init__.__kwdefaults__ + else: + _inner.__kwdefaults__ = None + _inner.__qualname__ = fn.__qualname__ + return cast(C, _inner) + + +def replaced_by_pep8(fn: C) -> Callable[[Callable], C]: + """ + Decorator for pre-PEP8 compatibility synonyms, to link them to the new function. + """ + return lambda other: _make_synonym_function(other.__name__, fn) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py new file mode 100644 index 0000000..ddfcf7f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py @@ -0,0 +1,23 @@ +"""Wrappers to call pyproject.toml-based build backend hooks. +""" + +from ._impl import ( + BackendInvalid, + BackendUnavailable, + BuildBackendHookCaller, + HookMissing, + UnsupportedOperation, + default_subprocess_runner, + quiet_subprocess_runner, +) + +__version__ = '1.0.0' +__all__ = [ + 'BackendUnavailable', + 'BackendInvalid', + 'HookMissing', + 'UnsupportedOperation', + 'default_subprocess_runner', + 'quiet_subprocess_runner', + 'BuildBackendHookCaller', +] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f2cf5da Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..6691b32 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc new file mode 100644 index 0000000..e28d2d1 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py new file mode 100644 index 0000000..95e509c --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_compat.py @@ -0,0 +1,8 @@ +__all__ = ("tomllib",) + +import sys + +if sys.version_info >= (3, 11): + import tomllib +else: + from pip._vendor import tomli as tomllib diff --git 
a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py new file mode 100644 index 0000000..37b0e65 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py @@ -0,0 +1,330 @@ +import json +import os +import sys +import tempfile +from contextlib import contextmanager +from os.path import abspath +from os.path import join as pjoin +from subprocess import STDOUT, check_call, check_output + +from ._in_process import _in_proc_script_path + + +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Will be raised if the backend cannot be imported in the hook process.""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Will be raised if the backend is invalid.""" + def __init__(self, backend_name, backend_path, message): + super().__init__(message) + self.backend_name = backend_name + self.backend_path = backend_path + + +class HookMissing(Exception): + """Will be raised on missing hooks (if a fallback can't be used).""" + def __init__(self, hook_name): + super().__init__(hook_name) + self.hook_name = hook_name + + +class UnsupportedOperation(Exception): + """May be raised by build_sdist if the backend indicates that it can't.""" + def __init__(self, traceback): + self.traceback = traceback + + +def default_subprocess_runner(cmd, cwd=None, extra_environ=None): + """The default method of calling the wrapper subprocess. + + This uses :func:`subprocess.check_call` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_call(cmd, cwd=cwd, env=env) + + +def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): + """Call the subprocess while suppressing output. + + This uses :func:`subprocess.check_output` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) + + +def norm_and_check(source_tree, requested): + """Normalise and check a backend path. + + Ensure that the requested backend path is specified as a relative path, + and resolves to a location under the given source tree. + + Return an absolute version of the requested path. + """ + if os.path.isabs(requested): + raise ValueError("paths must be relative") + + abs_source = os.path.abspath(source_tree) + abs_requested = os.path.normpath(os.path.join(abs_source, requested)) + # We have to use commonprefix for Python 2.7 compatibility. So we + # normalise case to avoid problems because commonprefix is a character + # based comparison :-( + norm_source = os.path.normcase(abs_source) + norm_requested = os.path.normcase(abs_requested) + if os.path.commonprefix([norm_source, norm_requested]) != norm_source: + raise ValueError("paths must be inside source tree") + + return abs_requested + + +class BuildBackendHookCaller: + """A wrapper to call the build backend hooks for a source directory. 
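The two runners above define the extension point: anything with the same (cmd, cwd, extra_environ) signature can drive the hook subprocess. A minimal sketch of a custom runner mirroring default_subprocess_runner (the name logging_runner is hypothetical):

import os
import subprocess

def logging_runner(cmd, cwd=None, extra_environ=None):
    # Same contract as default_subprocess_runner above, plus a trace line.
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    print("running hook subprocess:", cmd, "in", cwd)
    subprocess.check_call(cmd, cwd=cwd, env=env)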
+ """ + + def __init__( + self, + source_dir, + build_backend, + backend_path=None, + runner=None, + python_executable=None, + ): + """ + :param source_dir: The source directory to invoke the build backend for + :param build_backend: The build backend spec + :param backend_path: Additional path entries for the build backend spec + :param runner: The :ref:`subprocess runner ` to use + :param python_executable: + The Python executable used to invoke the build backend + """ + if runner is None: + runner = default_subprocess_runner + + self.source_dir = abspath(source_dir) + self.build_backend = build_backend + if backend_path: + backend_path = [ + norm_and_check(self.source_dir, p) for p in backend_path + ] + self.backend_path = backend_path + self._subprocess_runner = runner + if not python_executable: + python_executable = sys.executable + self.python_executable = python_executable + + @contextmanager + def subprocess_runner(self, runner): + """A context manager for temporarily overriding the default + :ref:`subprocess runner `. + + .. code-block:: python + + hook_caller = BuildBackendHookCaller(...) + with hook_caller.subprocess_runner(quiet_subprocess_runner): + ... + """ + prev = self._subprocess_runner + self._subprocess_runner = runner + try: + yield + finally: + self._subprocess_runner = prev + + def _supported_features(self): + """Return the list of optional features supported by the backend.""" + return self._call_hook('_supported_features', {}) + + def get_requires_for_build_wheel(self, config_settings=None): + """Get additional dependencies required for building a wheel. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + + .. admonition:: Fallback + + If the build backend does not define a hook with this name, an + empty list will be returned. + """ + return self._call_hook('get_requires_for_build_wheel', { + 'config_settings': config_settings + }) + + def prepare_metadata_for_build_wheel( + self, metadata_directory, config_settings=None, + _allow_fallback=True): + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + :rtype: str + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_wheel`` hook and the dist-info extracted from + that will be returned. + """ + return self._call_hook('prepare_metadata_for_build_wheel', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, + }) + + def build_wheel( + self, wheel_directory, config_settings=None, + metadata_directory=None): + """Build a wheel from this project. + + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_wheel`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_wheel`, the build backend would + not be invoked. Instead, the previously built wheel will be copied + to ``wheel_directory`` and the name of that file will be returned.
+ """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook('build_wheel', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, + 'metadata_directory': metadata_directory, + }) + + def get_requires_for_build_editable(self, config_settings=None): + """Get additional dependencies required for building an editable wheel. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + + .. admonition:: Fallback + + If the build backend does not define a hook with this name, an + empty list will be returned. + """ + return self._call_hook('get_requires_for_build_editable', { + 'config_settings': config_settings + }) + + def prepare_metadata_for_build_editable( + self, metadata_directory, config_settings=None, + _allow_fallback=True): + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + :rtype: str + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_editable`` hook and the dist-info + extracted from that will be returned. + """ + return self._call_hook('prepare_metadata_for_build_editable', { + 'metadata_directory': abspath(metadata_directory), + 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, + }) + + def build_editable( + self, wheel_directory, config_settings=None, + metadata_directory=None): + """Build an editable wheel from this project. + + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_editable`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_editable`, the build backend + would not be invoked. Instead, the previously built wheel will be + copied to ``wheel_directory`` and the name of that file will be + returned. + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook('build_editable', { + 'wheel_directory': abspath(wheel_directory), + 'config_settings': config_settings, + 'metadata_directory': metadata_directory, + }) + + def get_requires_for_build_sdist(self, config_settings=None): + """Get additional dependencies required for building an sdist. + + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] + """ + return self._call_hook('get_requires_for_build_sdist', { + 'config_settings': config_settings + }) + + def build_sdist(self, sdist_directory, config_settings=None): + """Build an sdist from this project. + + :returns: + The name of the newly created sdist within ``sdist_directory``.
+ """ + return self._call_hook('build_sdist', { + 'sdist_directory': abspath(sdist_directory), + 'config_settings': config_settings, + }) + + def _call_hook(self, hook_name, kwargs): + extra_environ = {'PEP517_BUILD_BACKEND': self.build_backend} + + if self.backend_path: + backend_path = os.pathsep.join(self.backend_path) + extra_environ['PEP517_BACKEND_PATH'] = backend_path + + with tempfile.TemporaryDirectory() as td: + hook_input = {'kwargs': kwargs} + write_json(hook_input, pjoin(td, 'input.json'), indent=2) + + # Run the hook in a subprocess + with _in_proc_script_path() as script: + python = self.python_executable + self._subprocess_runner( + [python, abspath(str(script)), hook_name, td], + cwd=self.source_dir, + extra_environ=extra_environ + ) + + data = read_json(pjoin(td, 'output.json')) + if data.get('unsupported'): + raise UnsupportedOperation(data.get('traceback', '')) + if data.get('no_backend'): + raise BackendUnavailable(data.get('traceback', '')) + if data.get('backend_invalid'): + raise BackendInvalid( + backend_name=self.build_backend, + backend_path=self.backend_path, + message=data.get('backend_error', '') + ) + if data.get('hook_missing'): + raise HookMissing(data.get('missing_hook_name') or hook_name) + return data['return_val'] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py new file mode 100644 index 0000000..917fa06 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py @@ -0,0 +1,18 @@ +"""This is a subpackage because the directory is on sys.path for _in_process.py + +The subpackage should stay as empty as possible to avoid shadowing modules that +the backend might import. +""" + +import importlib.resources as resources + +try: + resources.files +except AttributeError: + # Python 3.8 compatibility + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') +else: + def _in_proc_script_path(): + return resources.as_file( + resources.files(__package__).joinpath('_in_process.py')) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..311e279 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc new file mode 100644 index 0000000..622c4f2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py new file mode 100644 index 0000000..ee511ff --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py @@ -0,0 +1,353 @@ +"""This is invoked in a subprocess to call the build backend hooks. 
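Taken together, _call_hook above is the entire wire protocol: hook kwargs are serialized to input.json in a scratch directory, the _in_process script (next file) executes the named hook inside the backend's own process, and results or error flags come back through output.json. A usage sketch, assuming an importable pyproject_hooks and a made-up project path:

import tempfile
from pyproject_hooks import BuildBackendHookCaller, quiet_subprocess_runner

caller = BuildBackendHookCaller(
    source_dir="/path/to/project",         # hypothetical; must contain pyproject.toml
    build_backend="setuptools.build_meta",
)
with caller.subprocess_runner(quiet_subprocess_runner):
    print(caller.get_requires_for_build_wheel())   # e.g. [] for modern setuptools
with tempfile.TemporaryDirectory() as out:
    print(caller.build_wheel(out))                 # file name of the built wheel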
+ +It expects: +- Command line args: hook_name, control_dir +- Environment variables: + PEP517_BUILD_BACKEND=entry.point:spec + PEP517_BACKEND_PATH=paths (separated with os.pathsep) +- control_dir/input.json: + - {"kwargs": {...}} + +Results: +- control_dir/output.json + - {"return_val": ...} +""" +import json +import os +import os.path +import re +import shutil +import sys +import traceback +from glob import glob +from importlib import import_module +from os.path import join as pjoin + +# This file is run as a script, and `import wrappers` is not zip-safe, so we +# include write_json() and read_json() from wrappers.py. + + +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Raised if we cannot import the backend""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Raised if the backend is invalid""" + def __init__(self, message): + self.message = message + + +class HookMissing(Exception): + """Raised if a hook is missing and we are not executing the fallback""" + def __init__(self, hook_name=None): + super().__init__(hook_name) + self.hook_name = hook_name + + +def contained_in(filename, directory): + """Test if a file is located within the given directory.""" + filename = os.path.normcase(os.path.abspath(filename)) + directory = os.path.normcase(os.path.abspath(directory)) + return os.path.commonprefix([filename, directory]) == directory + + +def _build_backend(): + """Find and load the build backend""" + # Add in-tree backend directories to the front of sys.path. + backend_path = os.environ.get('PEP517_BACKEND_PATH') + if backend_path: + extra_pathitems = backend_path.split(os.pathsep) + sys.path[:0] = extra_pathitems + + ep = os.environ['PEP517_BUILD_BACKEND'] + mod_path, _, obj_path = ep.partition(':') + try: + obj = import_module(mod_path) + except ImportError: + raise BackendUnavailable(traceback.format_exc()) + + if backend_path: + if not any( + contained_in(obj.__file__, path) + for path in extra_pathitems + ): + raise BackendInvalid("Backend was not loaded from backend-path") + + if obj_path: + for path_part in obj_path.split('.'): + obj = getattr(obj, path_part) + return obj + + +def _supported_features(): + """Return the list of optional features supported by the backend. + + Returns a list of strings. + The only possible value is 'build_editable'. + """ + backend = _build_backend() + features = [] + if hasattr(backend, "build_editable"): + features.append("build_editable") + return features + + +def get_requires_for_build_wheel(config_settings): + """Invoke the optional get_requires_for_build_wheel hook + + Returns [] if the hook is not defined. + """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_wheel + except AttributeError: + return [] + else: + return hook(config_settings) + + +def get_requires_for_build_editable(config_settings): + """Invoke the optional get_requires_for_build_editable hook + + Returns [] if the hook is not defined.
+ """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_editable + except AttributeError: + return [] + else: + return hook(config_settings) + + +def prepare_metadata_for_build_wheel( + metadata_directory, config_settings, _allow_fallback): + """Invoke optional prepare_metadata_for_build_wheel + + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. + """ + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_wheel + except AttributeError: + if not _allow_fallback: + raise HookMissing() + else: + return hook(metadata_directory, config_settings) + # fallback to build_wheel outside the try block to avoid exception chaining + # which can be confusing to users and is not relevant + whl_basename = backend.build_wheel(metadata_directory, config_settings) + return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, + config_settings) + + +def prepare_metadata_for_build_editable( + metadata_directory, config_settings, _allow_fallback): + """Invoke optional prepare_metadata_for_build_editable + + Implements a fallback by building an editable wheel if the hook isn't + defined, unless _allow_fallback is False in which case HookMissing is + raised. + """ + backend = _build_backend() + try: + hook = backend.prepare_metadata_for_build_editable + except AttributeError: + if not _allow_fallback: + raise HookMissing() + try: + build_hook = backend.build_editable + except AttributeError: + raise HookMissing(hook_name='build_editable') + else: + whl_basename = build_hook(metadata_directory, config_settings) + return _get_wheel_metadata_from_wheel(whl_basename, + metadata_directory, + config_settings) + else: + return hook(metadata_directory, config_settings) + + +WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL' + + +def _dist_info_files(whl_zip): + """Identify the .dist-info folder inside a wheel ZipFile.""" + res = [] + for path in whl_zip.namelist(): + m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path) + if m: + res.append(path) + if res: + return res + raise Exception("No .dist-info folder found in wheel") + + +def _get_wheel_metadata_from_wheel( + whl_basename, metadata_directory, config_settings): + """Extract the metadata from a wheel. + + Fallback for when the build backend does not + define the 'get_wheel_metadata' hook. + """ + from zipfile import ZipFile + with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'): + pass # Touch marker file + + whl_file = os.path.join(metadata_directory, whl_basename) + with ZipFile(whl_file) as zipf: + dist_info = _dist_info_files(zipf) + zipf.extractall(path=metadata_directory, members=dist_info) + return dist_info[0].split('/')[0] + + +def _find_already_built_wheel(metadata_directory): + """Check for a wheel already built during the get_wheel_metadata hook. + """ + if not metadata_directory: + return None + metadata_parent = os.path.dirname(metadata_directory) + if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)): + return None + + whl_files = glob(os.path.join(metadata_parent, '*.whl')) + if not whl_files: + print('Found wheel built marker, but no .whl files') + return None + if len(whl_files) > 1: + print('Found multiple .whl files; unspecified behaviour. ' + 'Will call build_wheel.') + return None + + # Exactly one .whl file + return whl_files[0] + + +def build_wheel(wheel_directory, config_settings, metadata_directory=None): + """Invoke the mandatory build_wheel hook. 
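The _dist_info_files helper above keys off a single pattern: a top-level '<name>-<version>.dist-info/' folder in the wheel's namelist(). A self-contained sketch of what that regex accepts (pattern copied from the source above; the file names are made up):

import re

pat = re.compile(r'[^/\\]+-[^/\\]+\.dist-info/')
names = [
    "demo/__init__.py",                # no match: not a dist-info folder
    "demo-1.0.dist-info/METADATA",     # match
    "demo-1.0.dist-info/RECORD",       # match
]
print([n for n in names if pat.match(n)])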
+ + If a wheel was already built in the + prepare_metadata_for_build_wheel fallback, this + will copy it rather than rebuilding the wheel. + """ + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) + return os.path.basename(prebuilt_whl) + + return _build_backend().build_wheel(wheel_directory, config_settings, + metadata_directory) + + +def build_editable(wheel_directory, config_settings, metadata_directory=None): + """Invoke the optional build_editable hook. + + If a wheel was already built in the + prepare_metadata_for_build_editable fallback, this + will copy it rather than rebuilding the wheel. + """ + backend = _build_backend() + try: + hook = backend.build_editable + except AttributeError: + raise HookMissing() + else: + prebuilt_whl = _find_already_built_wheel(metadata_directory) + if prebuilt_whl: + shutil.copy2(prebuilt_whl, wheel_directory) + return os.path.basename(prebuilt_whl) + + return hook(wheel_directory, config_settings, metadata_directory) + + +def get_requires_for_build_sdist(config_settings): + """Invoke the optional get_requires_for_build_sdist hook + + Returns [] if the hook is not defined. + """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_sdist + except AttributeError: + return [] + else: + return hook(config_settings) + + +class _DummyException(Exception): + """Nothing should ever raise this exception""" + + +class GotUnsupportedOperation(Exception): + """For internal use when backend raises UnsupportedOperation""" + def __init__(self, traceback): + self.traceback = traceback + + +def build_sdist(sdist_directory, config_settings): + """Invoke the mandatory build_sdist hook.""" + backend = _build_backend() + try: + return backend.build_sdist(sdist_directory, config_settings) + except getattr(backend, 'UnsupportedOperation', _DummyException): + raise GotUnsupportedOperation(traceback.format_exc()) + + +HOOK_NAMES = { + 'get_requires_for_build_wheel', + 'prepare_metadata_for_build_wheel', + 'build_wheel', + 'get_requires_for_build_editable', + 'prepare_metadata_for_build_editable', + 'build_editable', + 'get_requires_for_build_sdist', + 'build_sdist', + '_supported_features', +} + + +def main(): + if len(sys.argv) < 3: + sys.exit("Needs args: hook_name, control_dir") + hook_name = sys.argv[1] + control_dir = sys.argv[2] + if hook_name not in HOOK_NAMES: + sys.exit("Unknown hook: %s" % hook_name) + hook = globals()[hook_name] + + hook_input = read_json(pjoin(control_dir, 'input.json')) + + json_out = {'unsupported': False, 'return_val': None} + try: + json_out['return_val'] = hook(**hook_input['kwargs']) + except BackendUnavailable as e: + json_out['no_backend'] = True + json_out['traceback'] = e.traceback + except BackendInvalid as e: + json_out['backend_invalid'] = True + json_out['backend_error'] = e.message + except GotUnsupportedOperation as e: + json_out['unsupported'] = True + json_out['traceback'] = e.traceback + except HookMissing as e: + json_out['hook_missing'] = True + json_out['missing_hook_name'] = e.hook_name or hook_name + + write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) + + +if __name__ == '__main__': + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py new file mode 100644 index 0000000..10ff67f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py @@ -0,0 +1,182 @@ +# __ +# /__) _ _ _ _
_/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at <https://requests.readthedocs.io>. + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. +""" + +import warnings + +from pip._vendor import urllib3 + +from .exceptions import RequestsDependencyWarning + +charset_normalizer_version = None + +try: + from pip._vendor.chardet import __version__ as chardet_version +except ImportError: + chardet_version = None + + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append("0") + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) + else: + raise Exception("You need either charset_normalizer or chardet installed") + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split("."))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) + warnings.warn(warning, RequestsDependencyWarning) + + +# Check imported dependencies for compatibility. +try: + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) +except (AssertionError, ValueError): + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip.
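A sketch of how the assertion-based gate above behaves for a few version strings, assuming check_compatibility is in scope (the module-level call above downgrades any AssertionError to a RequestsDependencyWarning):

check_compatibility("1.26.18", None, "3.3.2")    # urllib3 1.x + charset_normalizer 3: OK
check_compatibility("2.0.7", "5.2.0", None)      # urllib3 2.x + chardet 5: OK
try:
    check_compatibility("1.20.0", "5.2.0", None) # urllib3 < 1.21 trips the assert
except AssertionError:
    print("unsupported urllib3 version")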
+ from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from pip._vendor.urllib3.contrib import pyopenssl + + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from pip._vendor.urllib3.exceptions import DependencyWarning + +warnings.simplefilter("ignore", DependencyWarning) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +from . import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4b6e2d2 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 0000000..ded6813 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc new file mode 100644 index 0000000..5485673 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc new file mode 100644 index 0000000..5f6bdca Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000..759b4d7 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..5a1fb4c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc new file mode 100644 index 0000000..505cd57 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..e41976b Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc new file mode 100644 index 0000000..be55d86 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..3f4a795 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc new file mode 100644 index 0000000..bc8e813 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc new file mode 100644 index 0000000..208da66 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..cb50c0c Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc new file mode 100644 index 0000000..f250f09 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc new file mode 100644 index 0000000..cede640 Binary files /dev/null and 
b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc new file mode 100644 index 0000000..a54a674 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc new file mode 100644 index 0000000..4df492e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..a22d750 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py new file mode 100644 index 0000000..5063c3f --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.31.0" +__build__ = 0x023100 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..f2cf635 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py @@ -0,0 +1,50 @@ +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" +import re + +from .compat import builtin_str + +_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") +_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") +_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") +_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") + +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) +HEADER_VALIDATORS = { + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, +} + + +def to_native_string(string, encoding="ascii"): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. 
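HEADER_VALIDATORS above pairs a (name, value) regex per header type; requests selects the pair matching whether a header arrived as str or bytes. A sketch, assuming the module is importable (in a normal requests install it is requests._internal_utils):

from requests._internal_utils import HEADER_VALIDATORS

name_re, value_re = HEADER_VALIDATORS[str]
print(bool(name_re.match("Content-Type")))       # True
print(bool(name_re.match("Bad:Name")))           # False: ':' is rejected in names
print(bool(value_re.match("text/html")))         # True
print(bool(value_re.match("evil\r\ninjected")))  # False: CR/LF injection blocked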
+ + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode("ascii") + return True + except UnicodeEncodeError: + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..10c1767 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py @@ -0,0 +1,538 @@ +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import os.path +import socket # noqa: F401 + +from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError +from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError +from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader +from pip._vendor.urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) +from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError +from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError +from pip._vendor.urllib3.exceptions import SSLError as _SSLError +from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url +from pip._vendor.urllib3.util import Timeout as TimeoutSauce +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.util.retry import Retry + +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) +from .models import Response +from .structures import CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) + +try: + from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter: + """The Base Transport Adapter""" + + def __init__(self): + super().__init__() + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. 
+ + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + + __attrs__ = [ + "max_retries", + "config", + "_pool_connections", + "_pool_maxsize", + "_pool_block", + ] + + def __init__( + self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK, + ): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super().__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager( + self._pool_connections, self._pool_maxsize, block=self._pool_block + ) + + def init_poolmanager( + self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs + ): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager( + num_pools=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. 
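init_poolmanager's pool_kwargs passthrough is the intended hook for tuning the underlying urllib3 PoolManager, as the docstring above notes. A sketch of the common subclassing pattern (TLS12Adapter is a made-up name; ssl_context is a urllib3 pool keyword):

import ssl
import requests

class TLS12Adapter(requests.adapters.HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        ctx = ssl.create_default_context()
        ctx.minimum_version = ssl.TLSVersion.TLSv1_2   # refuse anything older
        pool_kwargs["ssl_context"] = ctx
        super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)

s = requests.Session()
s.mount("https://", TLS12Adapter(max_retries=3))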
+ :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith("socks"): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify an SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith("https") and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + conn.cert_reqs = "CERT_REQUIRED" + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = "CERT_NONE" + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise OSError( + f"Could not find the TLS certificate file, " + f"invalid path: {conn.cert_file}" + ) + if conn.key_file and not os.path.exists(conn.key_file): + raise OSError( + f"Could not find the TLS key file, invalid path: {conn.key_file}" + ) + + def build_response(self, req, resp): + """Builds a :class:`Response <requests.Response>` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` + + :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, "status", None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, "headers", {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode("utf-8") + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context.
+ response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = proxy and scheme != "https" + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith("socks") + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return headers + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. 
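proxy_headers above contributes only Proxy-Authorization, derived from userinfo embedded in the proxy URL via get_auth_from_url. A quick sketch of its output (the proxy address is made up):

import requests

a = requests.adapters.HTTPAdapter()
print(a.proxy_headers("http://user:pass@10.0.0.1:3128"))
# -> {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'} (base64 of "user:pass")
print(a.proxy_headers("http://10.0.0.1:3128"))   # -> {} when no credentials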
+ :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers( + request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + ) + + chunked = not (request.body is None or "Content-Length" in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError: + raise ValueError( + f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " + f"or a single float to set both timeouts to the same value." + ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/api.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/api.py new file mode 100644 index 0000000..cd0b3ee --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. 
+ + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. 
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..9733686 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py @@ -0,0 +1,315 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. 
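+    # Editorial illustration (added in this review, not upstream requests
+    # code): the finished header value is simply base64("username:password")
+    # prefixed with "Basic ", e.g.
+    #
+    #   _basic_auth_str("user", "pass")  ->  'Basic dXNlcjpwYXNz'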
+ if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = 
sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. 
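+            # Editorial note (added in this review): accessing r.content
+            # drains the response body, which lets urllib3 return the
+            # underlying connection to the pool so the retried request
+            # below can reuse it instead of opening a new socket.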
+ r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. + self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..38696a1 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" + +import os + +if "_PIP_STANDALONE_CERT" not in os.environ: + from pip._vendor.certifi import where +else: + def where(): + return os.environ["_PIP_STANDALONE_CERT"] + +if __name__ == "__main__": + print(where()) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..9ab2bb4 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py @@ -0,0 +1,67 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +from pip._vendor import chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# Note: We've patched out simplejson support in pip because it prevents +# upgrading simplejson on Windows. +import json +from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
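+# Editorial note (added in this review): the imports and aliases below are
+# re-exported so downstream code can keep writing, for example,
+# `from requests.compat import urlparse` without caring that the
+# Python 2/3 split no longer exists.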
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..bf54ab2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
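+
+    Illustrative usage (example added in this review, not upstream)::
+
+        c = create_cookie("session", "abc123", domain="example.com")
+        c.domain  # 'example.com'
+        c.path    # '/' (the default)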
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..168d073 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py @@ -0,0 +1,141 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
+""" +from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/help.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/help.py new file mode 100644 index 0000000..2d292c2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/help.py @@ -0,0 +1,131 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +from pip._vendor import idna +from pip._vendor import urllib3 + +from . import __version__ as requests_version + +charset_normalizer = None + +try: + from pip._vendor import chardet +except ImportError: + chardet = None + +try: + from pip._vendor.urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..d181ba2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/models.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/models.py new file mode 100644 index 0000000..76e6f19 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/models.py @@ -0,0 +1,1034 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from pip._vendor.urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from pip._vendor.urllib3.fields import RequestField +from pip._vendor.urllib3.filepost import encode_multipart_formdata +from pip._vendor.urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return f"" + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return f"" + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + from pip._vendor import idna + + try: + host = idna.encode(host, uts46=True).decode("utf-8") + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode("utf8") + else: + url = str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ":" in url and not url.lower().startswith("http"): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + raise MissingSchema( + f"Invalid URL {url!r}: No scheme supplied. " + f"Perhaps you meant https://{url}?" + ) + + if not host: + raise InvalidURL(f"Invalid URL {url!r}: No host supplied") + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL("URL has an invalid label.") + elif host.startswith(("*", ".")): + raise InvalidURL("URL has an invalid label.") + + # Carefully reconstruct the network location + netloc = auth or "" + if netloc: + netloc += "@" + netloc += host + if port: + netloc += f":{port}" + + # Bare domains aren't valid URLs. + if not path: + path = "/" + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = f"{query}&{enc_params}" + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. 
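+        # Editorial note (added in this review): the branches below choose
+        # the body in this order of precedence: a json payload (only when
+        # no `data` was given), then a streamed iterable, then multipart
+        # `files`, then form-encoded or raw `data`.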
+
+    def prepare_body(self, data, files, json=None):
+        """Prepares the given HTTP body data."""
+
+        # Check if file, fo, generator, iterator.
+        # If not, run through normal process.
+
+        # Nottin' on you.
+        body = None
+        content_type = None
+
+        if not data and json is not None:
+            # urllib3 requires a bytes-like body. Python 2's json.dumps
+            # provides this natively, but Python 3 gives a Unicode string.
+            content_type = "application/json"
+
+            try:
+                body = complexjson.dumps(json, allow_nan=False)
+            except ValueError as ve:
+                raise InvalidJSONError(ve, request=self)
+
+            if not isinstance(body, bytes):
+                body = body.encode("utf-8")
+
+        is_stream = all(
+            [
+                hasattr(data, "__iter__"),
+                not isinstance(data, (basestring, list, tuple, Mapping)),
+            ]
+        )
+
+        if is_stream:
+            try:
+                length = super_len(data)
+            except (TypeError, AttributeError, UnsupportedOperation):
+                length = None
+
+            body = data
+
+            if getattr(body, "tell", None) is not None:
+                # Record the current file position before reading.
+                # This will allow us to rewind a file in the event
+                # of a redirect.
+                try:
+                    self._body_position = body.tell()
+                except OSError:
+                    # This differentiates from None, allowing us to catch
+                    # a failed `tell()` later when trying to rewind the body
+                    self._body_position = object()
+
+            if files:
+                raise NotImplementedError(
+                    "Streamed bodies and files are mutually exclusive."
+                )
+
+            if length:
+                self.headers["Content-Length"] = builtin_str(length)
+            else:
+                self.headers["Transfer-Encoding"] = "chunked"
+        else:
+            # Multi-part file uploads.
+            if files:
+                (body, content_type) = self._encode_files(files, data)
+            else:
+                if data:
+                    body = self._encode_params(data)
+                    if isinstance(data, basestring) or hasattr(data, "read"):
+                        content_type = None
+                    else:
+                        content_type = "application/x-www-form-urlencoded"
+
+            self.prepare_content_length(body)
+
+            # Add content-type if it wasn't explicitly provided.
+            if content_type and ("content-type" not in self.headers):
+                self.headers["Content-Type"] = content_type
+
+        self.body = body
+
+    def prepare_content_length(self, body):
+        """Prepare Content-Length header based on request method and body"""
+        if body is not None:
+            length = super_len(body)
+            if length:
+                # If length exists, set it. Otherwise, we fallback
+                # to Transfer-Encoding: chunked.
+                self.headers["Content-Length"] = builtin_str(length)
+        elif (
+            self.method not in ("GET", "HEAD")
+            and self.headers.get("Content-Length") is None
+        ):
+            # Set Content-Length to 0 for methods that can have a body
+            # but don't provide one. (i.e. not GET or HEAD)
+            self.headers["Content-Length"] = "0"
+
+    def prepare_auth(self, auth, url=""):
+        """Prepares the given HTTP auth data."""
+
+        # If no Auth is explicitly provided, extract it from the URL first.
+        if auth is None:
+            url_auth = get_auth_from_url(self.url)
+            auth = url_auth if any(url_auth) else None
+
+        if auth:
+            if isinstance(auth, tuple) and len(auth) == 2:
+                # special-case basic HTTP auth
+                auth = HTTPBasicAuth(*auth)
+
+            # Allow auth to make its changes.
+            r = auth(self)
+
+            # Update self to reflect the auth changes.
+            self.__dict__.update(r.__dict__)
+
+            # Recompute Content-Length
+            self.prepare_content_length(self.body)
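A quick sketch of two of the body paths `prepare_body` chooses between (form-encoding vs. JSON); again this uses the top-level `requests` package, and no network is needed since preparation is purely local::

    >>> import requests
    >>> form = requests.Request('POST', 'https://httpbin.org/post', data={'a': '1'}).prepare()
    >>> form.headers['Content-Type'], form.body
    ('application/x-www-form-urlencoded', 'a=1')
    >>> js = requests.Request('POST', 'https://httpbin.org/post', json={'a': 1}).prepare()
    >>> js.headers['Content-Type'], js.body
    ('application/json', b'{"a": 1}')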
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers["Cookie"] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response: + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + "_content", + "status_code", + "headers", + "url", + "history", + "encoding", + "reason", + "cookies", + "elapsed", + "request", + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, "_content_consumed", True) + setattr(self, "raw", None) + + def __repr__(self): + return f"" + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + return chardet.detect(self.content)["encoding"] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+            )
+        # simulate reading small chunks of the content
+        reused_chunks = iter_slices(self._content, chunk_size)
+
+        stream_chunks = generate()
+
+        chunks = reused_chunks if self._content_consumed else stream_chunks
+
+        if decode_unicode:
+            chunks = stream_decode_response_unicode(chunks, self)
+
+        return chunks
+
+    def iter_lines(
+        self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
+    ):
+        """Iterates over the response data, one line at a time. When
+        stream=True is set on the request, this avoids reading the
+        content at once into memory for large responses.
+
+        .. note:: This method is not reentrant safe.
+        """
+
+        pending = None
+
+        for chunk in self.iter_content(
+            chunk_size=chunk_size, decode_unicode=decode_unicode
+        ):
+
+            if pending is not None:
+                chunk = pending + chunk
+
+            if delimiter:
+                lines = chunk.split(delimiter)
+            else:
+                lines = chunk.splitlines()
+
+            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+                pending = lines.pop()
+            else:
+                pending = None
+
+            yield from lines
+
+        if pending is not None:
+            yield pending
+
+    @property
+    def content(self):
+        """Content of the response, in bytes."""
+
+        if self._content is False:
+            # Read the contents.
+            if self._content_consumed:
+                raise RuntimeError("The content for this response was already consumed")
+
+            if self.status_code == 0 or self.raw is None:
+                self._content = None
+            else:
+                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
+
+        self._content_consumed = True
+        # don't need to release the connection; that's been handled by urllib3
+        # since we exhausted the data.
+        return self._content
+
+    @property
+    def text(self):
+        """Content of the response, in unicode.
+
+        If Response.encoding is None, encoding will be guessed using
+        ``charset_normalizer`` or ``chardet``.
+
+        The encoding of the response content is determined based solely on HTTP
+        headers, following RFC 2616 to the letter. If you can take advantage of
+        non-HTTP knowledge to make a better guess at the encoding, you should
+        set ``r.encoding`` appropriately before accessing this property.
+        """
+
+        # Try charset from content-type
+        content = None
+        encoding = self.encoding
+
+        if not self.content:
+            return ""
+
+        # Fallback to auto-detected encoding.
+        if self.encoding is None:
+            encoding = self.apparent_encoding
+
+        # Decode unicode from given encoding.
+        try:
+            content = str(self.content, encoding, errors="replace")
+        except (LookupError, TypeError):
+            # A LookupError is raised if the encoding was not found which could
+            # indicate a misspelling or similar mistake.
+            #
+            # A TypeError can be raised if encoding is None
+            #
+            # So we try blindly encoding.
+            content = str(self.content, errors="replace")
+
+        return content
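`iter_content`/`iter_lines` above are the streaming counterparts to the `content`/`text` properties; a hedged sketch (requires network access, and chunk boundaries depend on the server and urllib3)::

    import requests

    with requests.get('https://httpbin.org/stream/3', stream=True) as r:
        for line in r.iter_lines(chunk_size=512):
            if line:                  # skip keep-alive empty lines
                print(line[:40])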
+
+    def json(self, **kwargs):
+        r"""Returns the json-encoded content of a response, if any.
+
+        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+        :raises requests.exceptions.JSONDecodeError: If the response body does not
+            contain valid json.
+        """
+
+        if not self.encoding and self.content and len(self.content) > 3:
+            # No encoding set. JSON RFC 4627 section 3 states we should expect
+            # UTF-8, -16 or -32. Detect which one to use; If the detection or
+            # decoding fails, fall back to `self.text` (using charset_normalizer to make
+            # a best guess).
+            encoding = guess_json_utf(self.content)
+            if encoding is not None:
+                try:
+                    return complexjson.loads(self.content.decode(encoding), **kwargs)
+                except UnicodeDecodeError:
+                    # Wrong UTF codec detected; usually because it's not UTF-8
+                    # but some other 8-bit codec. This is an RFC violation,
+                    # and the server didn't bother to tell us what codec *was*
+                    # used.
+                    pass
+                except JSONDecodeError as e:
+                    raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+
+        try:
+            return complexjson.loads(self.text, **kwargs)
+        except JSONDecodeError as e:
+            # Catch JSON-related errors and raise as requests.JSONDecodeError
+            # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
+            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+
+    @property
+    def links(self):
+        """Returns the parsed header links of the response, if any."""
+
+        header = self.headers.get("link")
+
+        resolved_links = {}
+
+        if header:
+            links = parse_header_links(header)
+
+            for link in links:
+                key = link.get("rel") or link.get("url")
+                resolved_links[key] = link
+
+        return resolved_links
+
+    def raise_for_status(self):
+        """Raises :class:`HTTPError`, if one occurred."""
+
+        http_error_msg = ""
+        if isinstance(self.reason, bytes):
+            # We attempt to decode utf-8 first because some servers
+            # choose to localize their reason strings. If the string
+            # isn't utf-8, we fall back to iso-8859-1 for all other
+            # encodings. (See PR #3538)
+            try:
+                reason = self.reason.decode("utf-8")
+            except UnicodeDecodeError:
+                reason = self.reason.decode("iso-8859-1")
+        else:
+            reason = self.reason
+
+        if 400 <= self.status_code < 500:
+            http_error_msg = (
+                f"{self.status_code} Client Error: {reason} for url: {self.url}"
+            )
+
+        elif 500 <= self.status_code < 600:
+            http_error_msg = (
+                f"{self.status_code} Server Error: {reason} for url: {self.url}"
+            )
+
+        if http_error_msg:
+            raise HTTPError(http_error_msg, response=self)
+
+    def close(self):
+        """Releases the connection back to the pool. Once this method has been
+        called the underlying ``raw`` object must not be accessed again.
+
+        *Note: Should not normally need to be called explicitly.*
+        """
+        if not self._content_consumed:
+            self.raw.close()
+
+        release_conn = getattr(self.raw, "release_conn", None)
+        if release_conn is not None:
+            release_conn()
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py
new file mode 100644
index 0000000..9582fa7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py
@@ -0,0 +1,16 @@
+import sys
+
+# This code exists for backwards compatibility reasons.
+# I don't like it either. Just look the other way. :)
+
+for package in ('urllib3', 'idna', 'chardet'):
+    vendored_package = "pip._vendor." + package
+    locals()[package] = __import__(vendored_package)
+    # This traversal is apparently necessary such that the identities are
+    # preserved (requests.packages.urllib3.* is urllib3.*)
+    for mod in list(sys.modules):
+        if mod == vendored_package or mod.startswith(vendored_package + '.'):
+            unprefixed_mod = mod[len("pip._vendor."):]
+            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]
+
+# Kinda cool, though, right?
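The `ok`/`raise_for_status` pair defined in models.py above supports the usual guard pattern; a minimal sketch (network access assumed)::

    import requests
    from requests.exceptions import HTTPError

    try:
        r = requests.get('https://httpbin.org/status/503', timeout=5)
        r.raise_for_status()
    except HTTPError as exc:
        print('server error:', exc.response.status_code)    # 503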
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py
new file mode 100644
index 0000000..dbcf2a7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py
@@ -0,0 +1,833 @@
+"""
+requests.sessions
+~~~~~~~~~~~~~~~~~
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+"""
+import os
+import sys
+import time
+from collections import OrderedDict
+from datetime import timedelta
+
+from ._internal_utils import to_native_string
+from .adapters import HTTPAdapter
+from .auth import _basic_auth_str
+from .compat import Mapping, cookielib, urljoin, urlparse
+from .cookies import (
+    RequestsCookieJar,
+    cookiejar_from_dict,
+    extract_cookies_to_jar,
+    merge_cookies,
+)
+from .exceptions import (
+    ChunkedEncodingError,
+    ContentDecodingError,
+    InvalidSchema,
+    TooManyRedirects,
+)
+from .hooks import default_hooks, dispatch_hook
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import (  # noqa: F401
+    DEFAULT_REDIRECT_LIMIT,
+    REDIRECT_STATI,
+    PreparedRequest,
+    Request,
+)
+from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import (  # noqa: F401
+    DEFAULT_PORTS,
+    default_headers,
+    get_auth_from_url,
+    get_environ_proxies,
+    get_netrc_auth,
+    requote_uri,
+    resolve_proxies,
+    rewind_body,
+    should_bypass_proxies,
+    to_key_val_list,
+)
+
+# Preferred clock, based on which one is more accurate on a given system.
+if sys.platform == "win32":
+    preferred_clock = time.perf_counter
+else:
+    preferred_clock = time.time
+
+
+def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
+    """Determines appropriate setting for a given request, taking into account
+    the explicit setting on that request, and the setting in the session. If a
+    setting is a dictionary, they will be merged together using `dict_class`
+    """
+
+    if session_setting is None:
+        return request_setting
+
+    if request_setting is None:
+        return session_setting
+
+    # Bypass if not a dictionary (e.g. verify)
+    if not (
+        isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
+    ):
+        return request_setting
+
+    merged_setting = dict_class(to_key_val_list(session_setting))
+    merged_setting.update(to_key_val_list(request_setting))
+
+    # Remove keys that are set to None. Extract keys first to avoid altering
+    # the dictionary during iteration.
+    none_keys = [k for (k, v) in merged_setting.items() if v is None]
+    for key in none_keys:
+        del merged_setting[key]
+
+    return merged_setting
+
+
+def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
+    """Properly merges both requests and session hooks.
+
+    This is necessary because when request_hooks == {'response': []}, the
+    merge breaks Session hooks entirely.
+    """
+    if session_hooks is None or session_hooks.get("response") == []:
+        return request_hooks
+
+    if request_hooks is None or request_hooks.get("response") == []:
+        return session_hooks
+
+    return merge_setting(request_hooks, session_hooks, dict_class)
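A side effect of `merge_setting` worth knowing: a per-request value of `None` removes the corresponding session-level key (commonly used to drop a session header for one call). A local sketch::

    >>> from requests.sessions import merge_setting
    >>> dict(merge_setting({'Accept': None, 'X-A': '1'}, {'Accept': 'text/html'}))
    {'X-A': '1'}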
+
+
+class SessionRedirectMixin:
+    def get_redirect_target(self, resp):
+        """Receives a Response. Returns a redirect URI or ``None``"""
+        # Due to the nature of how requests processes redirects this method will
+        # be called at least once upon the original response and at least twice
+        # on each subsequent redirect response (if any).
+        # If a custom mixin is used to handle this logic, it may be advantageous
+        # to cache the redirect location onto the response object as a private
+        # attribute.
+        if resp.is_redirect:
+            location = resp.headers["location"]
+            # Currently the underlying http module on py3 decodes headers
+            # in latin1, but empirical evidence suggests that latin1 is very
+            # rarely used with non-ASCII characters in HTTP headers.
+            # It is more likely to get a UTF-8 header than a latin1 one.
+            # This causes incorrect handling of UTF8 encoded location headers.
+            # To solve this, we re-encode the location in latin1.
+            location = location.encode("latin1")
+            return to_native_string(location, "utf8")
+        return None
+
+    def should_strip_auth(self, old_url, new_url):
+        """Decide whether Authorization header should be removed when redirecting"""
+        old_parsed = urlparse(old_url)
+        new_parsed = urlparse(new_url)
+        if old_parsed.hostname != new_parsed.hostname:
+            return True
+        # Special case: allow http -> https redirect when using the standard
+        # ports. This isn't specified by RFC 7235, but is kept to avoid
+        # breaking backwards compatibility with older versions of requests
+        # that allowed any redirects on the same host.
+        if (
+            old_parsed.scheme == "http"
+            and old_parsed.port in (80, None)
+            and new_parsed.scheme == "https"
+            and new_parsed.port in (443, None)
+        ):
+            return False
+
+        # Handle default port usage corresponding to scheme.
+        changed_port = old_parsed.port != new_parsed.port
+        changed_scheme = old_parsed.scheme != new_parsed.scheme
+        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
+        if (
+            not changed_scheme
+            and old_parsed.port in default_port
+            and new_parsed.port in default_port
+        ):
+            return False
+
+        # Standard case: root URI must match
+        return changed_port or changed_scheme
+
+    def resolve_redirects(
+        self,
+        resp,
+        req,
+        stream=False,
+        timeout=None,
+        verify=True,
+        cert=None,
+        proxies=None,
+        yield_requests=False,
+        **adapter_kwargs,
+    ):
+        """Receives a Response. Returns a generator of Responses or Requests."""
+
+        hist = []  # keep track of history
+
+        url = self.get_redirect_target(resp)
+        previous_fragment = urlparse(req.url).fragment
+        while url:
+            prepared_request = req.copy()
+
+            # Update history and keep track of redirects.
+            # resp.history must ignore the original request in this loop
+            hist.append(resp)
+            resp.history = hist[1:]
+
+            try:
+                resp.content  # Consume socket so it can be released
+            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+                resp.raw.read(decode_content=False)
+
+            if len(resp.history) >= self.max_redirects:
+                raise TooManyRedirects(
+                    f"Exceeded {self.max_redirects} redirects.", response=resp
+                )
+
+            # Release the connection back into the pool.
+            resp.close()
+
+            # Handle redirection without scheme (see: RFC 1808 Section 4)
+            if url.startswith("//"):
+                parsed_rurl = urlparse(resp.url)
+                url = ":".join([to_native_string(parsed_rurl.scheme), url])
+
+            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
+            parsed = urlparse(url)
+            if parsed.fragment == "" and previous_fragment:
+                parsed = parsed._replace(fragment=previous_fragment)
+            elif parsed.fragment:
+                previous_fragment = parsed.fragment
+            url = parsed.geturl()
+
+            # Facilitate relative 'location' headers, as allowed by RFC 7231.
+            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+            # Compliant with RFC3986, we percent encode the url.
+            if not parsed.netloc:
+                url = urljoin(resp.url, requote_uri(url))
+            else:
+                url = requote_uri(url)
+
+            prepared_request.url = to_native_string(url)
+
+            self.rebuild_method(prepared_request, resp)
+
+            # https://github.com/psf/requests/issues/1084
+            if resp.status_code not in (
+                codes.temporary_redirect,
+                codes.permanent_redirect,
+            ):
+                # https://github.com/psf/requests/issues/3490
+                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
+                for header in purged_headers:
+                    prepared_request.headers.pop(header, None)
+                prepared_request.body = None
+
+            headers = prepared_request.headers
+            headers.pop("Cookie", None)
+
+            # Extract any cookies sent on the response to the cookiejar
+            # in the new request. Because we've mutated our copied prepared
+            # request, use the old one that we haven't yet touched.
+            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
+            merge_cookies(prepared_request._cookies, self.cookies)
+            prepared_request.prepare_cookies(prepared_request._cookies)
+
+            # Rebuild auth and proxy information.
+            proxies = self.rebuild_proxies(prepared_request, proxies)
+            self.rebuild_auth(prepared_request, resp)
+
+            # A failed tell() sets `_body_position` to `object()`. This non-None
+            # value ensures `rewindable` will be True, allowing us to raise an
+            # UnrewindableBodyError, instead of hanging the connection.
+            rewindable = prepared_request._body_position is not None and (
+                "Content-Length" in headers or "Transfer-Encoding" in headers
+            )
+
+            # Attempt to rewind consumed file-like object.
+            if rewindable:
+                rewind_body(prepared_request)
+
+            # Override the original request.
+            req = prepared_request
+
+            if yield_requests:
+                yield req
+            else:
+
+                resp = self.send(
+                    req,
+                    stream=stream,
+                    timeout=timeout,
+                    verify=verify,
+                    cert=cert,
+                    proxies=proxies,
+                    allow_redirects=False,
+                    **adapter_kwargs,
+                )
+
+                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
+
+                # extract redirect url, if any, for the next loop
+                url = self.get_redirect_target(resp)
+                yield resp
+
+    def rebuild_auth(self, prepared_request, response):
+        """When being redirected we may want to strip authentication from the
+        request to avoid leaking credentials. This method intelligently removes
+        and reapplies authentication where possible to avoid credential loss.
+        """
+        headers = prepared_request.headers
+        url = prepared_request.url
+
+        if "Authorization" in headers and self.should_strip_auth(
+            response.request.url, url
+        ):
+            # If we get redirected to a new host, we should strip out any
+            # authentication headers.
+            del headers["Authorization"]
+
+        # .netrc might have more auth for us on our new host.
+        new_auth = get_netrc_auth(url) if self.trust_env else None
+        if new_auth is not None:
+            prepared_request.prepare_auth(new_auth)
+
+    def rebuild_proxies(self, prepared_request, proxies):
+        """This method re-evaluates the proxy configuration by considering the
+        environment variables. If we are redirected to a URL covered by
+        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+        proxy keys for this URL (in case they were stripped by a previous
+        redirect).
+
+        This method also replaces the Proxy-Authorization header where
+        necessary.
+
+        :rtype: dict
+        """
+        headers = prepared_request.headers
+        scheme = urlparse(prepared_request.url).scheme
+        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
+
+        if "Proxy-Authorization" in headers:
+            del headers["Proxy-Authorization"]
+
+        try:
+            username, password = get_auth_from_url(new_proxies[scheme])
+        except KeyError:
+            username, password = None, None
+
+        # urllib3 handles proxy authorization for us in the standard adapter.
+        # Avoid appending this to TLS tunneled requests where it may be leaked.
+        if not scheme.startswith('https') and username and password:
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+        return new_proxies
+
+    def rebuild_method(self, prepared_request, response):
+        """When being redirected we may want to change the method of the request
+        based on certain specs or browser behavior.
+        """
+        method = prepared_request.method
+
+        # https://tools.ietf.org/html/rfc7231#section-6.4.4
+        if response.status_code == codes.see_other and method != "HEAD":
+            method = "GET"
+
+        # Do what the browsers do, despite standards...
+        # First, turn 302s into GETs.
+        if response.status_code == codes.found and method != "HEAD":
+            method = "GET"
+
+        # Second, if a POST is responded to with a 301, turn it into a GET.
+        # This bizarre behaviour is explained in Issue 1704.
+        if response.status_code == codes.moved and method == "POST":
+            method = "GET"
+
+        prepared_request.method = method
+
+
+class Session(SessionRedirectMixin):
+    """A Requests session.
+
+    Provides cookie persistence, connection-pooling, and configuration.
+
+    Basic Usage::
+
+      >>> import requests
+      >>> s = requests.Session()
+      >>> s.get('https://httpbin.org/get')
+      <Response [200]>
+
+    Or as a context manager::
+
+      >>> with requests.Session() as s:
+      ...     s.get('https://httpbin.org/get')
+      <Response [200]>
+    """
+
+    __attrs__ = [
+        "headers",
+        "cookies",
+        "auth",
+        "proxies",
+        "hooks",
+        "params",
+        "verify",
+        "cert",
+        "adapters",
+        "stream",
+        "trust_env",
+        "max_redirects",
+    ]
+
+    def __init__(self):
+
+        #: A case-insensitive dictionary of headers to be sent on each
+        #: :class:`Request <Request>` sent from this
+        #: :class:`Session <Session>`.
+        self.headers = default_headers()
+
+        #: Default Authentication tuple or object to attach to
+        #: :class:`Request <Request>`.
+        self.auth = None
+
+        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+        #: be used on each :class:`Request <Request>`.
+        self.proxies = {}
+
+        #: Event-handling hooks.
+        self.hooks = default_hooks()
+
+        #: Dictionary of querystring data to attach to each
+        #: :class:`Request <Request>`. The dictionary values may be lists for
+        #: representing multivalued query parameters.
+        self.params = {}
+
+        #: Stream response content default.
+        self.stream = False
+
+        #: SSL Verification default.
+        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
+        #: remote end.
+        #: If verify is set to `False`, requests will accept any TLS certificate
+        #: presented by the server, and will ignore hostname mismatches and/or
+        #: expired certificates, which will make your application vulnerable to
+        #: man-in-the-middle (MitM) attacks.
+        #: Only set this to `False` for testing.
+        self.verify = True
+
+        #: SSL client certificate default, if String, path to ssl client
+        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
+        self.cert = None
+
+        #: Maximum number of redirects allowed. If the request exceeds this
+        #: limit, a :class:`TooManyRedirects` exception is raised.
+        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
+        #: 30.
+        self.max_redirects = DEFAULT_REDIRECT_LIMIT
+
+        #: Trust environment settings for proxy configuration, default
+        #: authentication and similar.
+        self.trust_env = True
+
+        #: A CookieJar containing all currently outstanding cookies set on this
+        #: session. By default it is a
+        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
+        #: may be any other ``cookielib.CookieJar`` compatible object.
+        self.cookies = cookiejar_from_dict({})
+
+        # Default connection adapters.
+        self.adapters = OrderedDict()
+        self.mount("https://", HTTPAdapter())
+        self.mount("http://", HTTPAdapter())
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        self.close()
+
+    def prepare_request(self, request):
+        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+        transmission and returns it. The :class:`PreparedRequest` has settings
+        merged from the :class:`Request <Request>` instance and those of the
+        :class:`Session`.
+
+        :param request: :class:`Request` instance to prepare with this
+            session's settings.
+        :rtype: requests.PreparedRequest
+        """
+        cookies = request.cookies or {}
+
+        # Bootstrap CookieJar.
+        if not isinstance(cookies, cookielib.CookieJar):
+            cookies = cookiejar_from_dict(cookies)
+
+        # Merge with session cookies
+        merged_cookies = merge_cookies(
+            merge_cookies(RequestsCookieJar(), self.cookies), cookies
+        )
+
+        # Set environment's basic authentication if not explicitly set.
+        auth = request.auth
+        if self.trust_env and not auth and not self.auth:
+            auth = get_netrc_auth(request.url)
+
+        p = PreparedRequest()
+        p.prepare(
+            method=request.method.upper(),
+            url=request.url,
+            files=request.files,
+            data=request.data,
+            json=request.json,
+            headers=merge_setting(
+                request.headers, self.headers, dict_class=CaseInsensitiveDict
+            ),
+            params=merge_setting(request.params, self.params),
+            auth=merge_setting(auth, self.auth),
+            cookies=merged_cookies,
+            hooks=merge_hooks(request.hooks, self.hooks),
+        )
+        return p
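`prepare_request` is what the documented "prepared request" workflow hangs off: merge session state in, tweak the result, then hand it to `send`. A sketch (the final `send` needs network access)::

    import requests

    s = requests.Session()
    s.headers['X-Env'] = 'demo'
    req = requests.Request('GET', 'https://httpbin.org/get')
    prepped = s.prepare_request(req)           # session headers/cookies merged in
    prepped.headers['X-Only-This-Call'] = '1'  # per-call tweak after merging
    r = s.send(prepped, timeout=5)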
+
+    def request(
+        self,
+        method,
+        url,
+        params=None,
+        data=None,
+        headers=None,
+        cookies=None,
+        files=None,
+        auth=None,
+        timeout=None,
+        allow_redirects=True,
+        proxies=None,
+        hooks=None,
+        stream=None,
+        verify=None,
+        cert=None,
+        json=None,
+    ):
+        """Constructs a :class:`Request <Request>`, prepares it and sends it.
+        Returns :class:`Response <Response>` object.
+
+        :param method: method for the new :class:`Request` object.
+        :param url: URL for the new :class:`Request` object.
+        :param params: (optional) Dictionary or bytes to be sent in the query
+            string for the :class:`Request`.
+        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+            object to send in the body of the :class:`Request`.
+        :param json: (optional) json to send in the body of the
+            :class:`Request`.
+        :param headers: (optional) Dictionary of HTTP Headers to send with the
+            :class:`Request`.
+        :param cookies: (optional) Dict or CookieJar object to send with the
+            :class:`Request`.
+        :param files: (optional) Dictionary of ``'filename': file-like-objects``
+            for multipart encoding upload.
+        :param auth: (optional) Auth tuple or callable to enable
+            Basic/Digest/Custom HTTP Auth.
+        :param timeout: (optional) How long to wait for the server to send
+            data before giving up, as a float, or a :ref:`(connect timeout,
+            read timeout) <timeouts>` tuple.
+        :type timeout: float or tuple
+        :param allow_redirects: (optional) Set to True by default.
+        :type allow_redirects: bool
+        :param proxies: (optional) Dictionary mapping protocol or protocol and
+            hostname to the URL of the proxy.
+        :param stream: (optional) whether to immediately download the response
+            content. Defaults to ``False``.
+        :param verify: (optional) Either a boolean, in which case it controls whether we verify
+            the server's TLS certificate, or a string, in which case it must be a path
+            to a CA bundle to use. Defaults to ``True``. When set to
+            ``False``, requests will accept any TLS certificate presented by
+            the server, and will ignore hostname mismatches and/or expired
+            certificates, which will make your application vulnerable to
+            man-in-the-middle (MitM) attacks. Setting verify to ``False``
+            may be useful during local development or testing.
+        :param cert: (optional) if String, path to ssl client cert file (.pem).
+            If Tuple, ('cert', 'key') pair.
+        :rtype: requests.Response
+        """
+        # Create the Request.
+        req = Request(
+            method=method.upper(),
+            url=url,
+            headers=headers,
+            files=files,
+            data=data or {},
+            json=json,
+            params=params or {},
+            auth=auth,
+            cookies=cookies,
+            hooks=hooks,
+        )
+        prep = self.prepare_request(req)
+
+        proxies = proxies or {}
+
+        settings = self.merge_environment_settings(
+            prep.url, proxies, stream, verify, cert
+        )
+
+        # Send the request.
+        send_kwargs = {
+            "timeout": timeout,
+            "allow_redirects": allow_redirects,
+        }
+        send_kwargs.update(settings)
+        resp = self.send(prep, **send_kwargs)
+
+        return resp
+
+    def get(self, url, **kwargs):
+        r"""Sends a GET request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("GET", url, **kwargs)
+
+    def options(self, url, **kwargs):
+        r"""Sends an OPTIONS request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("OPTIONS", url, **kwargs)
+
+    def head(self, url, **kwargs):
+        r"""Sends a HEAD request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        kwargs.setdefault("allow_redirects", False)
+        return self.request("HEAD", url, **kwargs)
+
+    def post(self, url, data=None, json=None, **kwargs):
+        r"""Sends a POST request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+            object to send in the body of the :class:`Request`.
+        :param json: (optional) json to send in the body of the :class:`Request`.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        return self.request("POST", url, data=data, json=json, **kwargs)
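The verb helpers above are thin wrappers over `request()`, so everything `request()` accepts can be passed through; e.g.::

    import requests

    with requests.Session() as s:
        r = s.get('https://httpbin.org/get',
                  params={'q': 'requests'},  # encoded into the query string
                  timeout=(3.05, 27))        # (connect, read) timeout tuple
        print(r.url)                         # ...get?q=requests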
+
+    def put(self, url, data=None, **kwargs):
+        r"""Sends a PUT request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+            object to send in the body of the :class:`Request`.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        return self.request("PUT", url, data=data, **kwargs)
+
+    def patch(self, url, data=None, **kwargs):
+        r"""Sends a PATCH request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+            object to send in the body of the :class:`Request`.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        return self.request("PATCH", url, data=data, **kwargs)
+
+    def delete(self, url, **kwargs):
+        r"""Sends a DELETE request. Returns :class:`Response` object.
+
+        :param url: URL for the new :class:`Request` object.
+        :param \*\*kwargs: Optional arguments that ``request`` takes.
+        :rtype: requests.Response
+        """
+
+        return self.request("DELETE", url, **kwargs)
+
+    def send(self, request, **kwargs):
+        """Send a given PreparedRequest.
+
+        :rtype: requests.Response
+        """
+        # Set defaults that the hooks can utilize to ensure they always have
+        # the correct parameters to reproduce the previous request.
+        kwargs.setdefault("stream", self.stream)
+        kwargs.setdefault("verify", self.verify)
+        kwargs.setdefault("cert", self.cert)
+        if "proxies" not in kwargs:
+            kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)
+
+        # It's possible that users might accidentally send a Request object.
+        # Guard against that specific failure case.
+        if isinstance(request, Request):
+            raise ValueError("You can only send PreparedRequests.")
+
+        # Set up variables needed for resolve_redirects and dispatching of hooks
+        allow_redirects = kwargs.pop("allow_redirects", True)
+        stream = kwargs.get("stream")
+        hooks = request.hooks
+
+        # Get the appropriate adapter to use
+        adapter = self.get_adapter(url=request.url)
+
+        # Start time (approximately) of the request
+        start = preferred_clock()
+
+        # Send the request
+        r = adapter.send(request, **kwargs)
+
+        # Total elapsed time of the request (approximately)
+        elapsed = preferred_clock() - start
+        r.elapsed = timedelta(seconds=elapsed)
+
+        # Response manipulation hooks
+        r = dispatch_hook("response", hooks, r, **kwargs)
+
+        # Persist cookies
+        if r.history:
+
+            # If the hooks create history then we want those cookies too
+            for resp in r.history:
+                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
+
+        extract_cookies_to_jar(self.cookies, request, r.raw)
+
+        # Resolve redirects if allowed.
+        if allow_redirects:
+            # Redirect resolving generator.
+            gen = self.resolve_redirects(r, request, **kwargs)
+            history = [resp for resp in gen]
+        else:
+            history = []
+
+        # Shuffle things around if there's history.
+        if history:
+            # Insert the first (original) request at the start
+            history.insert(0, r)
+            # Get the last request made
+            r = history.pop()
+            r.history = history
+
+        # If redirects aren't being followed, store the response on the Request for Response.next().
+        if not allow_redirects:
+            try:
+                r._next = next(
+                    self.resolve_redirects(r, request, yield_requests=True, **kwargs)
+                )
+            except StopIteration:
+                pass
+
+        if not stream:
+            r.content
+
+        return r
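`send` runs the `response` hook via `dispatch_hook` for every response, redirects included. A sketch of registering one on a session::

    import requests

    def log_response(r, *args, **kwargs):
        print(r.status_code, r.url)   # fires for each response in the chain
        return r                      # returning the response keeps it unchanged

    s = requests.Session()
    s.hooks['response'].append(log_response)
    s.get('https://httpbin.org/redirect/1', timeout=5)   # logs the 302, then the 200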
+
+    def merge_environment_settings(self, url, proxies, stream, verify, cert):
+        """
+        Check the environment and merge it with some settings.
+
+        :rtype: dict
+        """
+        # Gather clues from the surrounding environment.
+        if self.trust_env:
+            # Set environment's proxies.
+            no_proxy = proxies.get("no_proxy") if proxies is not None else None
+            env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+            for (k, v) in env_proxies.items():
+                proxies.setdefault(k, v)
+
+            # Look for requests environment configuration
+            # and be compatible with cURL.
+            if verify is True or verify is None:
+                verify = (
+                    os.environ.get("REQUESTS_CA_BUNDLE")
+                    or os.environ.get("CURL_CA_BUNDLE")
+                    or verify
+                )
+
+        # Merge all the kwargs.
+        proxies = merge_setting(proxies, self.proxies)
+        stream = merge_setting(stream, self.stream)
+        verify = merge_setting(verify, self.verify)
+        cert = merge_setting(cert, self.cert)
+
+        return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}
+
+    def get_adapter(self, url):
+        """
+        Returns the appropriate connection adapter for the given URL.
+
+        :rtype: requests.adapters.BaseAdapter
+        """
+        for (prefix, adapter) in self.adapters.items():
+
+            if url.lower().startswith(prefix.lower()):
+                return adapter
+
+        # Nothing matches :-/
+        raise InvalidSchema(f"No connection adapters were found for {url!r}")
+
+    def close(self):
+        """Closes all adapters and, as such, the session."""
+        for v in self.adapters.values():
+            v.close()
+
+    def mount(self, prefix, adapter):
+        """Registers a connection adapter to a prefix.
+
+        Adapters are sorted in descending order by prefix length.
+        """
+        self.adapters[prefix] = adapter
+        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
+
+        for key in keys_to_move:
+            self.adapters[key] = self.adapters.pop(key)
+
+    def __getstate__(self):
+        state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
+        return state
+
+    def __setstate__(self, state):
+        for attr, value in state.items():
+            setattr(self, attr, value)
+
+
+def session():
+    """
+    Returns a :class:`Session` for context-management.
+
+    .. deprecated:: 1.0.0
+
+        This method has been deprecated since version 1.0.0 and is only kept for
+        backwards compatibility. New code should use :class:`~requests.sessions.Session`
+        to create a session. This may be removed at a future date.
+
+    :rtype: Session
+    """
+    return Session()
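`mount` plus `get_adapter`'s longest-prefix-first scan is the transport-adapter extension point; the classic sketch is retry configuration (`Retry` comes from urllib3 and is re-exported by the top-level `requests.adapters`)::

    import requests
    from requests.adapters import HTTPAdapter, Retry

    s = requests.Session()
    retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503])
    s.mount('https://', HTTPAdapter(max_retries=retries))
    # A longer prefix sorts first, so it wins for matching URLs.
    s.mount('https://internal.example/', HTTPAdapter(max_retries=Retry(total=0)))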
+ 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing",), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large",), + 414: ("request_uri_too_large",), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..188e13e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
+
+    def __setitem__(self, key, value):
+        # Use the lowercased key for lookups, but store the actual
+        # key alongside the value.
+        self._store[key.lower()] = (key, value)
+
+    def __getitem__(self, key):
+        return self._store[key.lower()][1]
+
+    def __delitem__(self, key):
+        del self._store[key.lower()]
+
+    def __iter__(self):
+        return (casedkey for casedkey, mappedvalue in self._store.values())
+
+    def __len__(self):
+        return len(self._store)
+
+    def lower_items(self):
+        """Like iteritems(), but with all lowercase keys."""
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
+
+    def __eq__(self, other):
+        if isinstance(other, Mapping):
+            other = CaseInsensitiveDict(other)
+        else:
+            return NotImplemented
+        # Compare insensitively
+        return dict(self.lower_items()) == dict(other.lower_items())
+
+    # Copy is required
+    def copy(self):
+        return CaseInsensitiveDict(self._store.values())
+
+    def __repr__(self):
+        return str(dict(self.items()))
+
+
+class LookupDict(dict):
+    """Dictionary lookup object."""
+
+    def __init__(self, name=None):
+        self.name = name
+        super().__init__()
+
+    def __repr__(self):
+        return f"<lookup '{self.name}'>"
+
+    def __getitem__(self, key):
+        # We allow fall-through here, so values default to None
+
+        return self.__dict__.get(key, None)
+
+    def get(self, key, default=None):
+        return self.__dict__.get(key, default)
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py
new file mode 100644
index 0000000..36607ed
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py
@@ -0,0 +1,1094 @@
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+
+from pip._vendor.urllib3.util import make_headers, parse_url
+
+from . import certs
+from .__version__ import __version__
+
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import (  # noqa: F401
+    _HEADER_VALIDATORS_BYTE,
+    _HEADER_VALIDATORS_STR,
+    HEADER_VALIDATORS,
+    to_native_string,
+)
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+)
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
+from .cookies import cookiejar_from_dict
+from .exceptions import (
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
+
+NETRC_FILES = (".netrc", "_netrc")
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {"http": 80, "https": 443}
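One consequence of `LookupDict.__getitem__` falling through to `self.__dict__` (defined just above in structures.py): unknown status names return `None` rather than raising::

    >>> import requests
    >>> requests.codes['no_such_name'] is None
    True
    >>> requests.codes.get('teapot')
    418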
+
+# Ensure that ', ' is used to preserve previous delimiter behavior.
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == "win32":
+    # provide a proxy_bypass version on Windows without DNS lookups
+
+    def proxy_bypass_registry(host):
+        try:
+            import winreg
+        except ImportError:
+            return False
+
+        try:
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
+            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
+            # ProxyOverride is almost always a string
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+        except (OSError, ValueError):
+            return False
+        if not proxyEnable or not proxyOverride:
+            return False
+
+        # make a check value list from the registry entry: replace the
+        # '<local>' string by the localhost entry and the corresponding
+        # canonical entry.
+        proxyOverride = proxyOverride.split(";")
+        # now check if we match one of the registry values.
+        for test in proxyOverride:
+            if test == "<local>":
+                if "." not in host:
+                    return True
+            test = test.replace(".", r"\.")  # mask dots
+            test = test.replace("*", r".*")  # change glob sequence
+            test = test.replace("?", r".")  # change glob char
+            if re.match(test, host, re.I):
+                return True
+        return False
+
+    def proxy_bypass(host):  # noqa
+        """Return True, if the host should be bypassed.
+
+        Checks proxy settings gathered from the environment, if specified,
+        or the registry.
+        """
+        if getproxies_environment():
+            return proxy_bypass_environment(host)
+        else:
+            return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+    """Returns an internal sequence dictionary update."""
+
+    if hasattr(d, "items"):
+        d = d.items()
+
+    return d
+
+
+def super_len(o):
+    total_length = None
+    current_position = 0
+
+    if hasattr(o, "__len__"):
+        total_length = len(o)
+
+    elif hasattr(o, "len"):
+        total_length = o.len
+
+    elif hasattr(o, "fileno"):
+        try:
+            fileno = o.fileno()
+        except (io.UnsupportedOperation, AttributeError):
+            # AttributeError is a surprising exception, seeing as how we've just checked
+            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+            # `Tarfile.extractfile()`, per issue 5229.
+            pass
+        else:
+            total_length = os.fstat(fileno).st_size
+
+            # Having used fstat to determine the file length, we need to
+            # confirm that this file was opened up in binary mode.
+            if "b" not in o.mode:
+                warnings.warn(
+                    (
+                        "Requests has determined the content-length for this "
+                        "request using the binary size of the file: however, the "
+                        "file has been opened in text mode (i.e. without the 'b' "
+                        "flag in the mode). This may lead to an incorrect "
+                        "content-length. In Requests 3.0, support will be removed "
+                        "for files in text mode."
+                    ),
+                    FileModeWarning,
+                )
+
+    if hasattr(o, "tell"):
+        try:
+            current_position = o.tell()
+        except OSError:
+            # This can happen in some weird situations, such as when the file
+            # is actually a special file descriptor like stdin. In this
+            # instance, we don't know what the length is, so set it to zero and
+            # let requests chunk it instead.
+            if total_length is not None:
+                current_position = total_length
+    else:
+        if hasattr(o, "seek") and total_length is None:
+            # StringIO and BytesIO have seek but no usable fileno
+            try:
+                # seek to end of file
+                o.seek(0, 2)
+                total_length = o.tell()
+
+                # seek back to current position to support
+                # partially read file-like objects
+                o.seek(current_position or 0)
+            except OSError:
+                total_length = 0
+
+    if total_length is None:
+        total_length = 0
+
+    return max(0, total_length - current_position)
+
+
+def get_netrc_auth(url, raise_errors=False):
+    """Returns the Requests tuple auth for a given url from netrc."""
+
+    netrc_file = os.environ.get("NETRC")
+    if netrc_file is not None:
+        netrc_locations = (netrc_file,)
+    else:
+        netrc_locations = (f"~/{f}" for f in NETRC_FILES)
+
+    try:
+        from netrc import NetrcParseError, netrc
+
+        netrc_path = None
+
+        for f in netrc_locations:
+            try:
+                loc = os.path.expanduser(f)
+            except KeyError:
+                # os.path.expanduser can fail when $HOME is undefined and
+                # getpwuid fails. See https://bugs.python.org/issue20164 &
+                # https://github.com/psf/requests/issues/1846
+                return
+
+            if os.path.exists(loc):
+                netrc_path = loc
+                break
+
+        # Abort early if there isn't one.
+        if netrc_path is None:
+            return
+
+        ri = urlparse(url)
+
+        # Strip port numbers from netloc. This weird `if...encode`` dance is
+        # used for Python 3.2, which doesn't support unicode literals.
+        splitstr = b":"
+        if isinstance(url, str):
+            splitstr = splitstr.decode("ascii")
+        host = ri.netloc.split(splitstr)[0]
+
+        try:
+            _netrc = netrc(netrc_path).authenticators(host)
+            if _netrc:
+                # Return with login / password
+                login_i = 0 if _netrc[0] else 1
+                return (_netrc[login_i], _netrc[2])
+        except (NetrcParseError, OSError):
+            # If there was a parsing error or a permissions issue reading the file,
+            # we'll just skip netrc auth unless explicitly asked to raise errors.
+            if raise_errors:
+                raise
+
+    # App Engine hackiness.
+    except (ImportError, AttributeError):
+        pass
+
+
+def guess_filename(obj):
+    """Tries to guess the filename of the given object."""
+    name = getattr(obj, "name", None)
+    if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">":
+        return os.path.basename(name)
+
+
+def extract_zipped_paths(path):
+    """Replace nonexistent paths that look like they refer to a member of a zip
+    archive with the location of an extracted copy of the target, or else
+    just return the provided path unchanged.
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL
+ # we're getting isn't in the no_proxy list.
+ no_proxy_arg = no_proxy
+ if no_proxy is None:
+ no_proxy = get_proxy("no_proxy")
+ parsed = urlparse(url)
+
+ if parsed.hostname is None:
+ # URLs don't always have hostnames, e.g. file:/// urls.
+ return True
+
+ if no_proxy:
+ # We need to check whether we match here. We need to see if we match
+ # the end of the hostname, both with and without the port.
+ no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)
+
+ if is_ipv4_address(parsed.hostname):
+ for proxy_ip in no_proxy:
+ if is_valid_cidr(proxy_ip):
+ if address_in_network(parsed.hostname, proxy_ip):
+ return True
+ elif parsed.hostname == proxy_ip:
+ # If the no_proxy ip was defined in plain IP notation instead
+ # of cidr notation and matches the IP of the index
+ return True
+ else:
+ host_with_port = parsed.hostname
+ if parsed.port:
+ host_with_port += f":{parsed.port}"
+
+ for host in no_proxy:
+ if parsed.hostname.endswith(host) or host_with_port.endswith(host):
+ # The URL does match something in no_proxy, so we don't want
+ # to apply the proxies on this URL.
+ return True
+
+ with set_environ("no_proxy", no_proxy_arg):
+ # parsed.hostname can be `None` in cases such as a file URI.
+ try:
+ bypass = proxy_bypass(parsed.hostname)
+ except (TypeError, socket.gaierror):
+ bypass = False
+
+ if bypass:
+ return True
+
+ return False
+
+
+def get_environ_proxies(url, no_proxy=None):
+ """
+ Return a dict of environment proxies.
+
+ :rtype: dict
+ """
+ if should_bypass_proxies(url, no_proxy=no_proxy):
+ return {}
+ else:
+ return getproxies()
+
+
+def select_proxy(url, proxies):
+ """Select a proxy for the url, if applicable.
+
+ :param url: The url of the request
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ """
+ proxies = proxies or {}
+ urlparts = urlparse(url)
+ if urlparts.hostname is None:
+ return proxies.get(urlparts.scheme, proxies.get("all"))
+
+ proxy_keys = [
+ urlparts.scheme + "://" + urlparts.hostname,
+ urlparts.scheme,
+ "all://" + urlparts.hostname,
+ "all",
+ ]
+ proxy = None
+ for proxy_key in proxy_keys:
+ if proxy_key in proxies:
+ proxy = proxies[proxy_key]
+ break
+
+ return proxy
+
+
+def resolve_proxies(request, proxies, trust_env=True):
+ """This method takes proxy information from a request and configuration
+ input to resolve a mapping of target proxies. This will consider settings
+ such as NO_PROXY to strip proxy configurations.
+
+ :param request: Request or PreparedRequest
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ :param trust_env: Boolean declaring whether to trust environment configs
+
+ :rtype: dict
+ """
+ proxies = proxies if proxies is not None else {}
+ url = request.url
+ scheme = urlparse(url).scheme
+ no_proxy = proxies.get("no_proxy")
+ new_proxies = proxies.copy()
+
+ if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
+ environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+ proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
+
+ if proxy:
+ new_proxies.setdefault(scheme, proxy)
+ return new_proxies
+
+
+def default_user_agent(name="python-requests"):
+ """
+ Return a string representing the default user agent.
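+
+ For example (the version shown is illustrative)::
+
+ >>> default_user_agent()
+ 'python-requests/2.31.0'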
+
+ :rtype: str
+ """
+ return f"{name}/{__version__}"
+
+
+def default_headers():
+ """
+ :rtype: requests.structures.CaseInsensitiveDict
+ """
+ return CaseInsensitiveDict(
+ {
+ "User-Agent": default_user_agent(),
+ "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+ "Accept": "*/*",
+ "Connection": "keep-alive",
+ }
+ )
+
+
+def parse_header_links(value):
+ """Return a list of parsed link headers.
+
+ i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+ :rtype: list
+ """
+
+ links = []
+
+ replace_chars = " '\""
+
+ value = value.strip(replace_chars)
+ if not value:
+ return links
+
+ for val in re.split(", *<", value):
+ try:
+ url, params = val.split(";", 1)
+ except ValueError:
+ url, params = val, ""
+
+ link = {"url": url.strip("<> '\"")}
+
+ for param in params.split(";"):
+ try:
+ key, value = param.split("=")
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii") # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+ """
+ :rtype: str
+ """
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and from their location and count
+ # determine the encoding. Also detect a BOM, if present.
+ sample = data[:4]
+ if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+ return "utf-32" # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return "utf-8-sig" # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return "utf-16" # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return "utf-8"
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return "utf-16-be"
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return "utf-16-le"
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return "utf-32-be"
+ if sample[1:] == _null3:
+ return "utf-32-le"
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+ """Given a URL that may or may not have a scheme, prepend the given scheme.
+ Does not replace a present scheme with the one provided as an argument.
+
+ :rtype: str
+ """
+ parsed = parse_url(url)
+ scheme, auth, host, port, path, query, fragment = parsed
+
+ # A defect in urlparse determines that there isn't a netloc present in some
+ # urls. We previously assumed parsing was overly cautious, and swapped the
+ # netloc and path. Due to a lack of tests on the original defect, this is
+ # maintained with parse_url for backwards compatibility.
+ netloc = parsed.netloc
+ if not netloc:
+ netloc, path = path, netloc
+
+ if auth:
+ # parse_url doesn't provide the netloc with auth
+ # so we'll add it ourselves.
+ netloc = "@".join([auth, netloc])
+ if scheme is None:
+ scheme = new_scheme
+ if path is None:
+ path = ""
+
+ return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username, password.
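+
+ For example::
+
+ >>> get_auth_from_url('https://user:pass@example.com/path')
+ ('user', 'pass')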
+
+ :rtype: (str, str)
+ """
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ("", "")
+
+ return auth
+
+
+def check_header_validity(header):
+ """Verifies that header parts don't contain leading whitespace,
+ reserved characters, or return characters.
+
+ :param header: tuple, in the format (name, value).
+ """
+ name, value = header
+ _validate_header_part(header, name, 0)
+ _validate_header_part(header, value, 1)
+
+
+def _validate_header_part(header, header_part, header_validator_index):
+ if isinstance(header_part, str):
+ validator = _HEADER_VALIDATORS_STR[header_validator_index]
+ elif isinstance(header_part, bytes):
+ validator = _HEADER_VALIDATORS_BYTE[header_validator_index]
+ else:
+ raise InvalidHeader(
+ f"Header part ({header_part!r}) from {header} "
+ f"must be of type str or bytes, not {type(header_part)}"
+ )
+
+ if not validator.match(header_part):
+ header_kind = "name" if header_validator_index == 0 else "value"
+ raise InvalidHeader(
+ f"Invalid leading whitespace, reserved character(s), or return "
+ f"character(s) in header {header_kind}: {header_part!r}"
+ )
+
+
+def urldefragauth(url):
+ """
+ Given a url remove the fragment and the authentication part.
+
+ :rtype: str
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see :func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit("@", 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ""))
+
+
+def rewind_body(prepared_request):
+ """Move file pointer back to its recorded starting position
+ so it can be read again on redirect.
+ """
+ body_seek = getattr(prepared_request.body, "seek", None)
+ if body_seek is not None and isinstance(
+ prepared_request._body_position, integer_types
+ ):
+ try:
+ body_seek(prepared_request._body_position)
+ except OSError:
+ raise UnrewindableBodyError(
+ "An error occurred when rewinding request body for redirect."
+ ) + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 0000000..d92acc7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "1.0.1" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, + Resolver, +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a478c97 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc new file mode 100644 index 0000000..672010e Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc new file mode 100644 index 0000000..7a4f166 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc new file mode 100644 index 0000000..c5ded39 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc new file mode 100644 index 0000000..6957b91 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d8a43ab Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc 
b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc new file mode 100644 index 0000000..8b86ddc Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 0000000..1becc50 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Mapping", "Sequence"] + +try: + from collections.abc import Mapping, Sequence +except ImportError: + from collections import Mapping, Sequence diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 0000000..e99d87e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,133 @@ +class AbstractProvider(object): + """Delegate class to provide the required interface for the resolver.""" + + def identify(self, requirement_or_candidate): + """Given a requirement, return an identifier for it. + + This is used to identify a requirement, e.g. whether two requirements + should have their specifier parts merged. + """ + raise NotImplementedError + + def get_preference( + self, + identifier, + resolutions, + candidates, + information, + backtrack_causes, + ): + """Produce a sort key for given requirement based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches which should be returned. + :param resolutions: Mapping of candidates currently pinned by the + resolver. Each key is an identifier, and the value is a candidate. + The candidate may conflict with requirements from ``information``. + :param candidates: Mapping of each dependency's possible candidates. + Each value is an iterator of candidates. + :param information: Mapping of requirement information of each package. + Each value is an iterator of *requirement information*. + :param backtrack_causes: Sequence of requirement information that were + the requirements that caused the resolver to most recently backtrack. + + A *requirement information* instance is a named tuple with two members: + + * ``requirement`` specifies a requirement contributing to the current + list of candidates. + * ``parent`` specifies the candidate that provides (depended on) the + requirement, or ``None`` to indicate a root requirement. + + The preference could depend on various issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the ``key`` + parameter of the built-in sorting function). 
The smaller the value is,
+ the more preferred this requirement is (i.e. the sorting function
+ is called with ``reverse=False``).
+ """
+ raise NotImplementedError
+
+ def find_matches(self, identifier, requirements, incompatibilities):
+ """Find all possible candidates that satisfy the given constraints.
+
+ :param identifier: An identifier as returned by ``identify()``. This
+ identifies the dependency whose matches should be returned.
+ :param requirements: A mapping of requirements that all returned
+ candidates must satisfy. Each key is an identifier, and the value
+ an iterator of requirements for that dependency.
+ :param incompatibilities: A mapping of known incompatibilities of
+ each dependency. Each key is an identifier, and the value an
+ iterator of incompatibilities known to the resolver. All
+ incompatibilities *must* be excluded from the return value.
+
+ This should try to get candidates based on the requirements' types.
+ For VCS, local, and archive requirements, the one-and-only match is
+ returned, and for a "named" requirement, the index(es) should be
+ consulted to find concrete candidates for this requirement.
+
+ The return value should produce candidates ordered by preference; the
+ most preferred candidate should come first. The return type may be one
+ of the following:
+
+ * A callable that returns an iterator that yields candidates.
+ * A collection of candidates.
+ * An iterable of candidates. This will be consumed immediately into a
+ list of candidates.
+ """
+ raise NotImplementedError
+
+ def is_satisfied_by(self, requirement, candidate):
+ """Whether the given requirement can be satisfied by a candidate.
+
+ The candidate is guaranteed to have been generated from the
+ requirement.
+
+ A boolean should be returned to indicate whether ``candidate`` is a
+ viable solution to the requirement.
+ """
+ raise NotImplementedError
+
+ def get_dependencies(self, candidate):
+ """Get dependencies of a candidate.
+
+ This should return a collection of requirements that `candidate`
+ specifies as its dependencies.
+ """
+ raise NotImplementedError
+
+
+class AbstractResolver(object):
+ """The thing that performs the actual resolution work."""
+
+ base_exception = Exception
+
+ def __init__(self, provider, reporter):
+ self.provider = provider
+ self.reporter = reporter
+
+ def resolve(self, requirements, **kwargs):
+ """Take a collection of constraints, spit out the resolution result.
+
+ This returns a representation of the final resolution state, with one
+ guaranteed attribute ``mapping`` that contains resolved candidates as
+ values. The keys are their respective identifiers.
+
+ :param requirements: A collection of constraints.
+ :param kwargs: Additional keyword arguments that subclasses may accept.
+
+ :raises: ``self.base_exception`` or its subclass.
+ """
+ raise NotImplementedError
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py
new file mode 100644
index 0000000..688b5e1
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py
@@ -0,0 +1,43 @@
+class BaseReporter(object):
+ """Delegate class to provide progress reporting for the resolver."""
+
+ def starting(self):
+ """Called before the resolution actually starts."""
+
+ def starting_round(self, index):
+ """Called before each round of resolution starts.
+
+ The index is zero-based.
+ """
+
+ def ending_round(self, index, state):
+ """Called before each round of resolution ends.
+
+ This is NOT called if the resolution ends at this round. Use `ending`
+ if you want to report finalization. The index is zero-based.
+ """
+
+ def ending(self, state):
+ """Called before the resolution ends successfully."""
+
+ def adding_requirement(self, requirement, parent):
+ """Called when adding a new requirement into the resolve criteria.
+
+ :param requirement: The additional requirement to be applied to filter
+ the available candidates.
+ :param parent: The candidate that requires ``requirement`` as a
+ dependency, or None if ``requirement`` is one of the root
+ requirements passed in from ``Resolver.resolve()``.
+ """
+
+ def resolving_conflicts(self, causes):
+ """Called when starting to attempt requirement conflict resolution.
+
+ :param causes: The information on the collision that caused the backtracking.
+ """
+
+ def rejecting_candidate(self, criterion, candidate):
+ """Called when rejecting a candidate during backtracking."""
+
+ def pinning(self, candidate):
+ """Called when adding a candidate to the potential solution."""
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py
new file mode 100644
index 0000000..2c3d0e3
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers.py
@@ -0,0 +1,547 @@
+import collections
+import itertools
+import operator
+
+from .providers import AbstractResolver
+from .structs import DirectedGraph, IteratorMapping, build_iter_view
+
+RequirementInformation = collections.namedtuple(
+ "RequirementInformation", ["requirement", "parent"]
+)
+
+
+class ResolverException(Exception):
+ """A base class for all exceptions raised by this module.
+
+ Exceptions derived from this class should all be handled in this module.
+ Any bubbling past the resolver should be treated as a bug.
+ """
+
+
+class RequirementsConflicted(ResolverException):
+ def __init__(self, criterion):
+ super(RequirementsConflicted, self).__init__(criterion)
+ self.criterion = criterion
+
+ def __str__(self):
+ return "Requirements conflict: {}".format(
+ ", ".join(repr(r) for r in self.criterion.iter_requirement()),
+ )
+
+
+class InconsistentCandidate(ResolverException):
+ def __init__(self, candidate, criterion):
+ super(InconsistentCandidate, self).__init__(candidate, criterion)
+ self.candidate = candidate
+ self.criterion = criterion
+
+ def __str__(self):
+ return "Provided candidate {!r} does not satisfy {}".format(
+ self.candidate,
+ ", ".join(repr(r) for r in self.criterion.iter_requirement()),
+ )
+
+
+class Criterion(object):
+ """Representation of possible resolution results of a package.
+
+ This holds three attributes:
+
+ * `information` is a collection of `RequirementInformation` pairs.
+ Each pair is a requirement contributing to this criterion, and the
+ candidate that provides the requirement.
+ * `incompatibilities` is a collection of all known not-to-work candidates
+ to exclude from consideration.
+ * `candidates` is a collection containing all possible candidates deduced
+ from the union of contributing requirements and known incompatibilities.
+ It should never be empty, except when the criterion is an attribute of a
+ raised `RequirementsConflicted` (in which case it is always empty).
+
+ .. note::
+ This class is intended to be externally immutable. **Do not** mutate
+ any of its attribute containers.
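+
+ A hypothetical read-only inspection (the "requests" key is
+ illustrative)::
+
+ for requirement, parent in criteria["requests"].information:
+ print(requirement, parent)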
+ """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria backtrack_causes") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. + """ + base = self._states[-1] + state = State( + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), + backtrack_causes=base.backtrack_causes[:], + ) + self._states.append(state) + + def _add_to_criteria(self, criteria, requirement, parent): + self._r.adding_requirement(requirement=requirement, parent=parent) + + identifier = self._p.identify(requirement_or_candidate=requirement) + criterion = criteria.get(identifier) + if criterion: + incompatibilities = list(criterion.incompatibilities) + else: + incompatibilities = [] + + matches = self._p.find_matches( + identifier=identifier, + requirements=IteratorMapping( + criteria, + operator.methodcaller("iter_requirement"), + {identifier: [requirement]}, + ), + incompatibilities=IteratorMapping( + criteria, + operator.attrgetter("incompatibilities"), + {identifier: incompatibilities}, + ), + ) + + if criterion: + information = list(criterion.information) + information.append(RequirementInformation(requirement, parent)) + else: + information = [RequirementInformation(requirement, parent)] + + criterion = Criterion( + candidates=build_iter_view(matches), + information=information, + incompatibilities=incompatibilities, + ) + if not criterion.candidates: + raise RequirementsConflicted(criterion) + criteria[identifier] = criterion + + def _remove_information_from_criteria(self, criteria, parents): + """Remove information from parents of criteria. + + Concretely, removes all values from each criterion's ``information`` + field that have one of ``parents`` as provider of the requirement. + + :param criteria: The criteria to update. + :param parents: Identifiers for which to remove information from all criteria. 
+ """ + if not parents: + return + for key, criterion in criteria.items(): + criteria[key] = Criterion( + criterion.candidates, + [ + information + for information in criterion.information + if ( + information.parent is None + or self._p.identify(information.parent) not in parents + ) + ], + criterion.incompatibilities, + ) + + def _get_preference(self, name): + return self._p.get_preference( + identifier=name, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + backtrack_causes=self.state.backtrack_causes, + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(requirement=r, candidate=current_pin) + for r in criterion.iter_requirement() + ) + + def _get_updated_criteria(self, candidate): + criteria = self.state.criteria.copy() + for requirement in self._p.get_dependencies(candidate=candidate): + self._add_to_criteria(criteria, requirement, parent=candidate) + return criteria + + def _attempt_to_pin_criterion(self, name): + criterion = self.state.criteria[name] + + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_updated_criteria(candidate) + except RequirementsConflicted as e: + self._r.rejecting_candidate(e.criterion, candidate) + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). + satisfied = all( + self._p.is_satisfied_by(requirement=r, candidate=candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + self._r.pinning(candidate=candidate) + self.state.criteria.update(criteria) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _backjump(self, causes): + """Perform backjumping. + + When we enter here, the stack is like this:: + + [ state Z ] + [ state Y ] + [ state X ] + .... earlier states are irrelevant. + + 1. No pins worked for Z, so it does not have a pin. + 2. We want to reset state Y to unpinned, and pin another candidate. + 3. State X holds what state Y was before the pin, but does not + have the incompatibility information gathered in state Y. + + Each iteration of the loop will: + + 1. Identify Z. The incompatibility is not always caused by the latest + state. For example, given three requirements A, B and C, with + dependencies A1, B1 and C1, where A1 and B1 are incompatible: the + last state might be related to C, so we want to discard the + previous state. + 2. Discard Z. + 3. Discard Y but remember its incompatibility information gathered + previously, and the failure we're dealing with right now. + 4. Push a new state Y' based on X, and apply the incompatibility + information from Y to Y'. + 5a. If this causes Y' to conflict, we need to backtrack again. Make Y' + the new Z and go back to step 2. + 5b. 
If the incompatibilities apply cleanly, end backtracking. + """ + incompatible_reqs = itertools.chain( + (c.parent for c in causes if c.parent is not None), + (c.requirement for c in causes), + ) + incompatible_deps = {self._p.identify(r) for r in incompatible_reqs} + while len(self._states) >= 3: + # Remove the state that triggered backtracking. + del self._states[-1] + + # Ensure to backtrack to a state that caused the incompatibility + incompatible_state = False + while not incompatible_state: + # Retrieve the last candidate pin and known incompatibilities. + try: + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + except (IndexError, KeyError): + raise ResolutionImpossible(causes) + current_dependencies = { + self._p.identify(d) + for d in self._p.get_dependencies(candidate) + } + incompatible_state = not current_dependencies.isdisjoint( + incompatible_deps + ) + + incompatibilities_from_broken = [ + (k, list(v.incompatibilities)) + for k, v in broken_state.criteria.items() + ] + + # Also mark the newly known incompatibility. + incompatibilities_from_broken.append((name, [candidate])) + + # Create a new state from the last known-to-work one, and apply + # the previously gathered incompatibility information. + def _patch_criteria(): + for k, incompatibilities in incompatibilities_from_broken: + if not incompatibilities: + continue + try: + criterion = self.state.criteria[k] + except KeyError: + continue + matches = self._p.find_matches( + identifier=k, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {k: incompatibilities}, + ), + ) + candidates = build_iter_view(matches) + if not candidates: + return False + incompatibilities.extend(criterion.incompatibilities) + self.state.criteria[k] = Criterion( + candidates=candidates, + information=list(criterion.information), + incompatibilities=incompatibilities, + ) + return True + + self._push_new_state() + success = _patch_criteria() + + # It works! Let's work on this new state. + if success: + return True + + # State does not work after applying known incompatibilities. + # Try the still previous state. + + # No way to backtrack anymore. + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._r.starting() + + # Initialize the root state. + self._states = [ + State( + mapping=collections.OrderedDict(), + criteria={}, + backtrack_causes=[], + ) + ] + for r in requirements: + try: + self._add_to_criteria(self.state.criteria, r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + + # The root state is saved as a sentinel so the first ever pin can have + # something to backtrack to if it fails. The root state is basically + # pinning the virtual "root" package in the graph. + self._push_new_state() + + for round_index in range(max_rounds): + self._r.starting_round(index=round_index) + + unsatisfied_names = [ + key + for key, criterion in self.state.criteria.items() + if not self._is_current_pin_satisfying(key, criterion) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! 
+ if not unsatisfied_names: + self._r.ending(state=self.state) + return self.state + + # keep track of satisfied names to calculate diff after pinning + satisfied_names = set(self.state.criteria.keys()) - set( + unsatisfied_names + ) + + # Choose the most preferred unpinned criterion to try. + name = min(unsatisfied_names, key=self._get_preference) + failure_causes = self._attempt_to_pin_criterion(name) + + if failure_causes: + causes = [i for c in failure_causes for i in c.information] + # Backjump if pinning fails. The backjump process puts us in + # an unpinned state, so we can work on it in the next round. + self._r.resolving_conflicts(causes=causes) + success = self._backjump(causes) + self.state.backtrack_causes[:] = causes + + # Dead ends everywhere. Give up. + if not success: + raise ResolutionImpossible(self.state.backtrack_causes) + else: + # discard as information sources any invalidated names + # (unsatisfied names that were previously satisfied) + newly_unsatisfied_names = { + key + for key, criterion in self.state.criteria.items() + if key in satisfied_names + and not self._is_current_pin_satisfying(key, criterion) + } + self._remove_information_from_criteria( + self.state.criteria, newly_unsatisfied_names + ) + # Pinning was successful. Push a new state to do another pin. + self._push_new_state() + + self._r.ending_round(index=round_index, state=self.state) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work.""" + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. 
+ * `criteria`: A dict of "criteria" that hold detailed information on
+ how edges in the graph are derived. Each key is an identifier of a
+ requirement, and the value is a `Criterion` instance.
+
+ The following exceptions may be raised if a resolution cannot be found:
+
+ * `ResolutionImpossible`: A resolution cannot be found for the given
+ combination of requirements. The `causes` attribute of the
+ exception is a list of (requirement, parent), giving the
+ requirements that could not be satisfied.
+ * `ResolutionTooDeep`: The dependency tree is too deeply nested and
+ the resolver gave up. This is usually caused by a circular
+ dependency, but you can try to resolve this by increasing the
+ `max_rounds` argument.
+ """
+ resolution = Resolution(self.provider, self.reporter)
+ state = resolution.resolve(requirements, max_rounds=max_rounds)
+ return _build_result(state)
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py
new file mode 100644
index 0000000..359a34f
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py
@@ -0,0 +1,170 @@
+import itertools
+
+from .compat import collections_abc
+
+
+class DirectedGraph(object):
+ """A graph structure with directed edges."""
+
+ def __init__(self):
+ self._vertices = set()
+ self._forwards = {} # <key> -> Set[<key>]
+ self._backwards = {} # <key> -> Set[<key>]
+
+ def __iter__(self):
+ return iter(self._vertices)
+
+ def __len__(self):
+ return len(self._vertices)
+
+ def __contains__(self, key):
+ return key in self._vertices
+
+ def copy(self):
+ """Return a shallow copy of this graph."""
+ other = DirectedGraph()
+ other._vertices = set(self._vertices)
+ other._forwards = {k: set(v) for k, v in self._forwards.items()}
+ other._backwards = {k: set(v) for k, v in self._backwards.items()}
+ return other
+
+ def add(self, key):
+ """Add a new vertex to the graph."""
+ if key in self._vertices:
+ raise ValueError("vertex exists")
+ self._vertices.add(key)
+ self._forwards[key] = set()
+ self._backwards[key] = set()
+
+ def remove(self, key):
+ """Remove a vertex from the graph, disconnecting all edges from/to it."""
+ self._vertices.remove(key)
+ for f in self._forwards.pop(key):
+ self._backwards[f].remove(key)
+ for t in self._backwards.pop(key):
+ self._forwards[t].remove(key)
+
+ def connected(self, f, t):
+ return f in self._backwards[t] and t in self._forwards[f]
+
+ def connect(self, f, t):
+ """Connect two existing vertices.
+
+ Nothing happens if the vertices are already connected.
+ """
+ if t not in self._vertices:
+ raise KeyError(t)
+ self._forwards[f].add(t)
+ self._backwards[t].add(f)
+
+ def iter_edges(self):
+ for f, children in self._forwards.items():
+ for t in children:
+ yield f, t
+
+ def iter_children(self, key):
+ return iter(self._forwards[key])
+
+ def iter_parents(self, key):
+ return iter(self._backwards[key])
+
+
+class IteratorMapping(collections_abc.Mapping):
+ def __init__(self, mapping, accessor, appends=None):
+ self._mapping = mapping
+ self._accessor = accessor
+ self._appends = appends or {}
+
+ def __repr__(self):
+ return "IteratorMapping({!r}, {!r}, {!r})".format(
+ self._mapping,
+ self._accessor,
+ self._appends,
+ )
+
+ def __bool__(self):
+ return bool(self._mapping or self._appends)
+
+ __nonzero__ = __bool__ # XXX: Python 2.
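+
+ # Values are produced lazily: for a key present in `mapping`, the stored
+ # value is passed through `accessor` and chained with any extras from
+ # `appends`; keys only in `appends` yield just the extras.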
+ + def __contains__(self, key): + return key in self._mapping or key in self._appends + + def __getitem__(self, k): + try: + v = self._mapping[k] + except KeyError: + return iter(self._appends[k]) + return itertools.chain(self._accessor(v), self._appends.get(k, ())) + + def __iter__(self): + more = (k for k in self._appends if k not in self._mapping) + return itertools.chain(self._mapping, more) + + def __len__(self): + more = sum(1 for k in self._appends if k not in self._mapping) + return len(self._mapping) + more + + +class _FactoryIterableView(object): + """Wrap an iterator factory returned by `find_matches()`. + + Calling `iter()` on this class would invoke the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks random access methods presented in + built-in Python sequence types. + """ + + def __init__(self, factory): + self._factory = factory + self._iterable = None + + def __repr__(self): + return "{}({})".format(type(self).__name__, list(self)) + + def __bool__(self): + try: + next(iter(self)) + except StopIteration: + return False + return True + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + iterable = ( + self._factory() if self._iterable is None else self._iterable + ) + self._iterable, current = itertools.tee(iterable) + return current + + +class _SequenceIterableView(object): + """Wrap an iterable returned by find_matches(). + + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. + """ + + def __init__(self, sequence): + self._sequence = sequence + + def __repr__(self): + return "{}({})".format(type(self).__name__, self._sequence) + + def __bool__(self): + return bool(self._sequence) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return iter(self._sequence) + + +def build_iter_view(matches): + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, collections_abc.Sequence): + matches = list(matches) + return _SequenceIterableView(matches) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py new file mode 100644 index 0000000..73f58d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py @@ -0,0 +1,177 @@ +"""Rich text and beautiful formatting in the terminal.""" + +import os +from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Union + +from ._extension import load_ipython_extension # noqa: F401 + +__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"] + +if TYPE_CHECKING: + from .console import Console + +# Global console used by alternative print +_console: Optional["Console"] = None + +try: + _IMPORT_CWD = os.path.abspath(os.getcwd()) +except FileNotFoundError: + # Can happen if the cwd has been deleted + _IMPORT_CWD = "" + + +def get_console() -> "Console": + """Get a global :class:`~rich.console.Console` instance. This function is used when Rich requires a Console, + and hasn't been explicitly given one. + + Returns: + Console: A console instance. + """ + global _console + if _console is None: + from .console import Console + + _console = Console() + + return _console + + +def reconfigure(*args: Any, **kwargs: Any) -> None: + """Reconfigures the global console by replacing it with another. 
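+
+ For example (the keyword arguments shown are illustrative)::
+
+ reconfigure(width=100, no_color=True)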
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py
new file mode 100644
index 0000000..73f58d7
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py
@@ -0,0 +1,177 @@
+"""Rich text and beautiful formatting in the terminal."""
+
+import os
+from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Union
+
+from ._extension import load_ipython_extension  # noqa: F401
+
+__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"]
+
+if TYPE_CHECKING:
+    from .console import Console
+
+# Global console used by alternative print
+_console: Optional["Console"] = None
+
+try:
+    _IMPORT_CWD = os.path.abspath(os.getcwd())
+except FileNotFoundError:
+    # Can happen if the cwd has been deleted
+    _IMPORT_CWD = ""
+
+
+def get_console() -> "Console":
+    """Get a global :class:`~rich.console.Console` instance. This function is used when Rich requires a Console,
+    and hasn't been explicitly given one.
+
+    Returns:
+        Console: A console instance.
+    """
+    global _console
+    if _console is None:
+        from .console import Console
+
+        _console = Console()
+
+    return _console
+
+
+def reconfigure(*args: Any, **kwargs: Any) -> None:
+    """Reconfigures the global console by replacing it with another.
+
+    Args:
+        *args (Any): Positional arguments for the replacement :class:`~rich.console.Console`.
+        **kwargs (Any): Keyword arguments for the replacement :class:`~rich.console.Console`.
+    """
+    from pip._vendor.rich.console import Console
+
+    new_console = Console(*args, **kwargs)
+    _console = get_console()
+    _console.__dict__ = new_console.__dict__
+
+
+def print(
+    *objects: Any,
+    sep: str = " ",
+    end: str = "\n",
+    file: Optional[IO[str]] = None,
+    flush: bool = False,
+) -> None:
+    r"""Print object(s) supplied via positional arguments.
+    This function has an identical signature to the built-in print.
+    For more advanced features, see the :class:`~rich.console.Console` class.
+
+    Args:
+        sep (str, optional): Separator between printed objects. Defaults to " ".
+        end (str, optional): Character to write at end of output. Defaults to "\\n".
+        file (IO[str], optional): File to write to, or None for stdout. Defaults to None.
+        flush (bool, optional): Has no effect as Rich always flushes output. Defaults to False.
+
+    """
+    from .console import Console
+
+    write_console = get_console() if file is None else Console(file=file)
+    return write_console.print(*objects, sep=sep, end=end)
+
+
+def print_json(
+    json: Optional[str] = None,
+    *,
+    data: Any = None,
+    indent: Union[None, int, str] = 2,
+    highlight: bool = True,
+    skip_keys: bool = False,
+    ensure_ascii: bool = False,
+    check_circular: bool = True,
+    allow_nan: bool = True,
+    default: Optional[Callable[[Any], Any]] = None,
+    sort_keys: bool = False,
+) -> None:
+    """Pretty prints JSON. Output will be valid JSON.
+
+    Args:
+        json (str): A string containing JSON.
+        data (Any): If json is not supplied, then encode this data.
+        indent (int, optional): Number of spaces to indent. Defaults to 2.
+        highlight (bool, optional): Enable highlighting of output. Defaults to True.
+        skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False.
+        ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False.
+        check_circular (bool, optional): Check for circular references. Defaults to True.
+        allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True.
+        default (Callable, optional): A callable that converts values that cannot be encoded
+            into something that can be JSON encoded. Defaults to None.
+        sort_keys (bool, optional): Sort dictionary keys. Defaults to False.
+    """
+
+    get_console().print_json(
+        json,
+        data=data,
+        indent=indent,
+        highlight=highlight,
+        skip_keys=skip_keys,
+        ensure_ascii=ensure_ascii,
+        check_circular=check_circular,
+        allow_nan=allow_nan,
+        default=default,
+        sort_keys=sort_keys,
+    )
+
+
+def inspect(
+    obj: Any,
+    *,
+    console: Optional["Console"] = None,
+    title: Optional[str] = None,
+    help: bool = False,
+    methods: bool = False,
+    docs: bool = True,
+    private: bool = False,
+    dunder: bool = False,
+    sort: bool = True,
+    all: bool = False,
+    value: bool = True,
+) -> None:
+    """Inspect any Python object.
+
+    * inspect(<OBJECT>) to see summarized info.
+    * inspect(<OBJECT>, methods=True) to see methods.
+    * inspect(<OBJECT>, help=True) to see full (non-abbreviated) help.
+    * inspect(<OBJECT>, private=True) to see private attributes (single underscore).
+    * inspect(<OBJECT>, dunder=True) to see attributes beginning with double underscore.
+    * inspect(<OBJECT>, all=True) to see all attributes.
+
+    Args:
+        obj (Any): An object to inspect.
+        title (str, optional): Title to display over inspect result, or None to use the type name. Defaults to None.
+        help (bool, optional): Show full help text rather than just first paragraph. Defaults to False.
+        methods (bool, optional): Enable inspection of callables. Defaults to False.
+        docs (bool, optional): Also render doc strings. Defaults to True.
+        private (bool, optional): Show private attributes (beginning with underscore). Defaults to False.
+        dunder (bool, optional): Show attributes starting with double underscore. Defaults to False.
+        sort (bool, optional): Sort attributes alphabetically. Defaults to True.
+        all (bool, optional): Show all attributes. Defaults to False.
+        value (bool, optional): Pretty print value. Defaults to True.
+    """
+    _console = console or get_console()
+    from pip._vendor.rich._inspect import Inspect
+
+    # Special case for inspect(inspect)
+    is_inspect = obj is inspect
+
+    _inspect = Inspect(
+        obj,
+        title=title,
+        help=is_inspect or help,
+        methods=is_inspect or methods,
+        docs=is_inspect or docs,
+        private=private,
+        dunder=dunder,
+        sort=sort,
+        all=all,
+        value=value,
+    )
+    _console.print(_inspect)
+
+
+if __name__ == "__main__":  # pragma: no cover
+    print("Hello, **World**")
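The helpers above make up rich's top-level convenience API. A minimal sketch of exercising the vendored copy (the printed values are illustrative):

    from pip._vendor.rich import get_console, inspect, print_json

    console = get_console()  # lazily creates the shared global Console
    console.print("[bold red]Hello[/] world")
    print_json('{"name": "pip", "vendored": true}')
    inspect(console, methods=False)  # summarized info about the Console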
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py
new file mode 100644
index 0000000..270629f
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py
@@ -0,0 +1,274 @@
+import colorsys
+import io
+from time import process_time
+
+from pip._vendor.rich import box
+from pip._vendor.rich.color import Color
+from pip._vendor.rich.console import Console, ConsoleOptions, Group, RenderableType, RenderResult
+from pip._vendor.rich.markdown import Markdown
+from pip._vendor.rich.measure import Measurement
+from pip._vendor.rich.pretty import Pretty
+from pip._vendor.rich.segment import Segment
+from pip._vendor.rich.style import Style
+from pip._vendor.rich.syntax import Syntax
+from pip._vendor.rich.table import Table
+from pip._vendor.rich.text import Text
+
+
+class ColorBox:
+    def __rich_console__(
+        self, console: Console, options: ConsoleOptions
+    ) -> RenderResult:
+        for y in range(0, 5):
+            for x in range(options.max_width):
+                h = x / options.max_width
+                l = 0.1 + ((y / 5) * 0.7)
+                r1, g1, b1 = colorsys.hls_to_rgb(h, l, 1.0)
+                r2, g2, b2 = colorsys.hls_to_rgb(h, l + 0.7 / 10, 1.0)
+                bgcolor = Color.from_rgb(r1 * 255, g1 * 255, b1 * 255)
+                color = Color.from_rgb(r2 * 255, g2 * 255, b2 * 255)
+                yield Segment("▄", Style(color=color, bgcolor=bgcolor))
+            yield Segment.line()
+
+    def __rich_measure__(
+        self, console: "Console", options: ConsoleOptions
+    ) -> Measurement:
+        return Measurement(1, options.max_width)
+
+
+def make_test_card() -> Table:
+    """Get a renderable that demonstrates a number of features."""
+    table = Table.grid(padding=1, pad_edge=True)
+    table.title = "Rich features"
+    table.add_column("Feature", no_wrap=True, justify="center", style="bold red")
+    table.add_column("Demonstration")
+
+    color_table = Table(
+        box=None,
+        expand=False,
+        show_header=False,
+        show_edge=False,
+        pad_edge=False,
+    )
+    color_table.add_row(
+        (
+            "✓ [bold green]4-bit color[/]\n"
+            "✓ [bold blue]8-bit color[/]\n"
+            "✓ [bold magenta]Truecolor (16.7 million)[/]\n"
+            "✓ [bold yellow]Dumb terminals[/]\n"
+            "✓ [bold cyan]Automatic color conversion"
+        ),
+        ColorBox(),
+    )
+
+    table.add_row("Colors", color_table)
+
+    table.add_row(
+        "Styles",
+        "All ansi styles: [bold]bold[/], [dim]dim[/], [italic]italic[/italic], [underline]underline[/], [strike]strikethrough[/], [reverse]reverse[/], and even [blink]blink[/].",
+    )
+
+    lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque in metus sed sapien ultricies pretium a at justo. Maecenas luctus velit et auctor maximus."
+    lorem_table = Table.grid(padding=1, collapse_padding=True)
+    lorem_table.pad_edge = False
+    lorem_table.add_row(
+        Text(lorem, justify="left", style="green"),
+        Text(lorem, justify="center", style="yellow"),
+        Text(lorem, justify="right", style="blue"),
+        Text(lorem, justify="full", style="red"),
+    )
+    table.add_row(
+        "Text",
+        Group(
+            Text.from_markup(
+                """Word wrap text. Justify [green]left[/], [yellow]center[/], [blue]right[/] or [red]full[/].\n"""
+            ),
+            lorem_table,
+        ),
+    )
+
+    def comparison(renderable1: RenderableType, renderable2: RenderableType) -> Table:
+        table = Table(show_header=False, pad_edge=False, box=None, expand=True)
+        table.add_column("1", ratio=1)
+        table.add_column("2", ratio=1)
+        table.add_row(renderable1, renderable2)
+        return table
+
+    table.add_row(
+        "Asian\nlanguage\nsupport",
+        ":flag_for_china: 该库支持中文,日文和韩文文本!\n:flag_for_japan: ライブラリは中国語、日本語、韓国語のテキストをサポートしています\n:flag_for_south_korea: 이 라이브러리는 중국어, 일본어 및 한국어 텍스트를 지원합니다",
+    )
+
+    markup_example = (
+        "[bold magenta]Rich[/] supports a simple [i]bbcode[/i]-like [b]markup[/b] for [yellow]color[/], [underline]style[/], and emoji! "
+        ":+1: :apple: :ant: :bear: :baguette_bread: :bus: "
+    )
+    table.add_row("Markup", markup_example)
+
+    example_table = Table(
+        show_edge=False,
+        show_header=True,
+        expand=False,
+        row_styles=["none", "dim"],
+        box=box.SIMPLE,
+    )
+    example_table.add_column("[green]Date", style="green", no_wrap=True)
+    example_table.add_column("[blue]Title", style="blue")
+    example_table.add_column(
+        "[cyan]Production Budget",
+        style="cyan",
+        justify="right",
+        no_wrap=True,
+    )
+    example_table.add_column(
+        "[magenta]Box Office",
+        style="magenta",
+        justify="right",
+        no_wrap=True,
+    )
+    example_table.add_row(
+        "Dec 20, 2019",
+        "Star Wars: The Rise of Skywalker",
+        "$275,000,000",
+        "$375,126,118",
+    )
+    example_table.add_row(
+        "May 25, 2018",
+        "[b]Solo[/]: A Star Wars Story",
+        "$275,000,000",
+        "$393,151,347",
+    )
+    example_table.add_row(
+        "Dec 15, 2017",
+        "Star Wars Ep. VIII: The Last Jedi",
+        "$262,000,000",
+        "[bold]$1,332,539,889[/bold]",
+    )
+    example_table.add_row(
+        "May 19, 1999",
+        "Star Wars Ep. [b]I[/b]: [i]The Phantom Menace",
+        "$115,000,000",
+        "$1,027,044,677",
+    )
+
+    table.add_row("Tables", example_table)
+
+    code = '''\
+def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]:
+    """Iterate and generate a tuple with a flag for last value."""
+    iter_values = iter(values)
+    try:
+        previous_value = next(iter_values)
+    except StopIteration:
+        return
+    for value in iter_values:
+        yield False, previous_value
+        previous_value = value
+    yield True, previous_value'''
+
+    pretty_data = {
+        "foo": [
+            3.1427,
+            (
+                "Paul Atreides",
+                "Vladimir Harkonnen",
+                "Thufir Hawat",
+            ),
+        ],
+        "atomic": (False, True, None),
+    }
+    table.add_row(
+        "Syntax\nhighlighting\n&\npretty\nprinting",
+        comparison(
+            Syntax(code, "python3", line_numbers=True, indent_guides=True),
+            Pretty(pretty_data, indent_guides=True),
+        ),
+    )
+
+    markdown_example = """\
+# Markdown
+
+Supports much of the *markdown* __syntax__!
+
+- Headers
+- Basic formatting: **bold**, *italic*, `code`
+- Block quotes
+- Lists, and more...
+    """
+    table.add_row(
+        "Markdown", comparison("[cyan]" + markdown_example, Markdown(markdown_example))
+    )
+
+    table.add_row(
+        "+more!",
+        """Progress bars, columns, styled logging handler, tracebacks, etc...""",
+    )
+    return table
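Since `make_test_card()` returns an ordinary `Table`, rendering it is a single `print` call; a small sketch (using `record=True` is just one way to capture the output):

    from pip._vendor.rich.console import Console

    console = Console(record=True)   # record=True keeps a transcript
    console.print(make_test_card())
    plain = console.export_text()    # plain-text copy of everything printed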
+ """ + table.add_row( + "Markdown", comparison("[cyan]" + markdown_example, Markdown(markdown_example)) + ) + + table.add_row( + "+more!", + """Progress bars, columns, styled logging handler, tracebacks, etc...""", + ) + return table + + +if __name__ == "__main__": # pragma: no cover + + console = Console( + file=io.StringIO(), + force_terminal=True, + ) + test_card = make_test_card() + + # Print once to warm cache + start = process_time() + console.print(test_card) + pre_cache_taken = round((process_time() - start) * 1000.0, 1) + + console.file = io.StringIO() + + start = process_time() + console.print(test_card) + taken = round((process_time() - start) * 1000.0, 1) + + c = Console(record=True) + c.print(test_card) + + print(f"rendered in {pre_cache_taken}ms (cold cache)") + print(f"rendered in {taken}ms (warm cache)") + + from pip._vendor.rich.panel import Panel + + console = Console() + + sponsor_message = Table.grid(padding=1) + sponsor_message.add_column(style="green", justify="right") + sponsor_message.add_column(no_wrap=True) + + sponsor_message.add_row( + "Textualize", + "[u blue link=https://github.com/textualize]https://github.com/textualize", + ) + sponsor_message.add_row( + "Twitter", + "[u blue link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan", + ) + + intro_message = Text.from_markup( + """\ +We hope you enjoy using Rich! + +Rich is maintained with [red]:heart:[/] by [link=https://www.textualize.io]Textualize.io[/] + +- Will McGugan""" + ) + + message = Table.grid(padding=2) + message.add_column() + message.add_column(no_wrap=True) + message.add_row(intro_message, sponsor_message) + + console.print( + Panel.fit( + message, + box=box.ROUNDED, + padding=(1, 2), + title="[b red]Thanks for trying out Rich!", + border_style="bright_blue", + ), + justify="center", + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9008662 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..16f21bf Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc new file mode 100644 index 0000000..f818de8 Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc new file mode 100644 index 0000000..ef02dad Binary files /dev/null and b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc differ diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc new file mode 100644 index 0000000..557cd39 
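The `_cell_widths.py` module added next is a generated table of `(start, end, width)` codepoint ranges: judging from the data, a width of -1 marks control characters, 0 marks combining/zero-width characters, and 2 marks double-width (East Asian) characters, with uncovered codepoints defaulting to width 1. A consumer can resolve a character's terminal cell width with a binary search over the sorted ranges; a minimal sketch (not the vendored lookup code, whose details may differ):

    from bisect import bisect_right

    def cell_width(ch: str, table=None) -> int:
        """Terminal cell width of a single character via the range table."""
        if table is None:
            from pip._vendor.rich._cell_widths import CELL_WIDTHS
            table = CELL_WIDTHS
        cp = ord(ch)
        # Rightmost range whose start <= cp; (cp, 0x110000, 3) sorts after any
        # real row that begins at cp, so rows with start == cp are included.
        i = bisect_right(table, (cp, 0x110000, 3)) - 1
        if i >= 0:
            start, end, width = table[i]
            if start <= cp <= end:
                return width
        return 1  # default: ordinary single-cell character

    assert cell_width("a") == 1
    assert cell_width("界") == 2       # CJK: double-width
    assert cell_width("\u0301") == 0   # combining accent: zero-width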
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py new file mode 100644 index 0000000..36286df --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py @@ -0,0 +1,451 @@ +# Auto generated by make_terminal_widths.py + +CELL_WIDTHS = [ + (0, 0, 0), + (1, 31, -1), + (127, 159, -1), + (768, 879, 0), + (1155, 1161, 0), + (1425, 1469, 0), + (1471, 1471, 0), + (1473, 1474, 0), + (1476, 1477, 0), + (1479, 1479, 0), + (1552, 1562, 0), + (1611, 1631, 0), + (1648, 1648, 0), + (1750, 1756, 0), + (1759, 1764, 0), + (1767, 1768, 0), + (1770, 1773, 0), + (1809, 1809, 0), + (1840, 1866, 0), + (1958, 1968, 0), + (2027, 2035, 0), + (2045, 2045, 0), + (2070, 2073, 0), + (2075, 2083, 0), + (2085, 2087, 0), + (2089, 2093, 0), + (2137, 2139, 0), + (2259, 2273, 0), + (2275, 2306, 0), + (2362, 2362, 0), + (2364, 2364, 0), + (2369, 2376, 0), + (2381, 2381, 0), + (2385, 2391, 0), + (2402, 2403, 0), + (2433, 2433, 0), + (2492, 2492, 0), + (2497, 2500, 0), + (2509, 2509, 0), + (2530, 2531, 0), + (2558, 2558, 0), + (2561, 2562, 0), + (2620, 2620, 0), + (2625, 2626, 0), + (2631, 2632, 0), + (2635, 2637, 0), + (2641, 2641, 0), + (2672, 2673, 0), + (2677, 2677, 0), + (2689, 2690, 0), + (2748, 2748, 0), + (2753, 2757, 0), + (2759, 2760, 0), + (2765, 2765, 0), + (2786, 2787, 0), + (2810, 2815, 0), + (2817, 2817, 0), + (2876, 2876, 0), + (2879, 2879, 0), + (2881, 2884, 0), + (2893, 2893, 0), + (2901, 2902, 0), + (2914, 2915, 0), + (2946, 2946, 0), + (3008, 3008, 0), + (3021, 3021, 0), + (3072, 3072, 0), + (3076, 3076, 0), + (3134, 3136, 0), + (3142, 3144, 0), + (3146, 3149, 0), + (3157, 3158, 0), + (3170, 3171, 0), + (3201, 3201, 0), + (3260, 3260, 0), + (3263, 3263, 0), + (3270, 3270, 0), + (3276, 3277, 0), + (3298, 3299, 0), + (3328, 3329, 0), + (3387, 3388, 0), + (3393, 3396, 0), + (3405, 3405, 0), + (3426, 3427, 0), + (3457, 3457, 0), + (3530, 3530, 0), + (3538, 3540, 0), + (3542, 3542, 0), + (3633,
3633, 0), + (3636, 3642, 0), + (3655, 3662, 0), + (3761, 3761, 0), + (3764, 3772, 0), + (3784, 3789, 0), + (3864, 3865, 0), + (3893, 3893, 0), + (3895, 3895, 0), + (3897, 3897, 0), + (3953, 3966, 0), + (3968, 3972, 0), + (3974, 3975, 0), + (3981, 3991, 0), + (3993, 4028, 0), + (4038, 4038, 0), + (4141, 4144, 0), + (4146, 4151, 0), + (4153, 4154, 0), + (4157, 4158, 0), + (4184, 4185, 0), + (4190, 4192, 0), + (4209, 4212, 0), + (4226, 4226, 0), + (4229, 4230, 0), + (4237, 4237, 0), + (4253, 4253, 0), + (4352, 4447, 2), + (4957, 4959, 0), + (5906, 5908, 0), + (5938, 5940, 0), + (5970, 5971, 0), + (6002, 6003, 0), + (6068, 6069, 0), + (6071, 6077, 0), + (6086, 6086, 0), + (6089, 6099, 0), + (6109, 6109, 0), + (6155, 6157, 0), + (6277, 6278, 0), + (6313, 6313, 0), + (6432, 6434, 0), + (6439, 6440, 0), + (6450, 6450, 0), + (6457, 6459, 0), + (6679, 6680, 0), + (6683, 6683, 0), + (6742, 6742, 0), + (6744, 6750, 0), + (6752, 6752, 0), + (6754, 6754, 0), + (6757, 6764, 0), + (6771, 6780, 0), + (6783, 6783, 0), + (6832, 6848, 0), + (6912, 6915, 0), + (6964, 6964, 0), + (6966, 6970, 0), + (6972, 6972, 0), + (6978, 6978, 0), + (7019, 7027, 0), + (7040, 7041, 0), + (7074, 7077, 0), + (7080, 7081, 0), + (7083, 7085, 0), + (7142, 7142, 0), + (7144, 7145, 0), + (7149, 7149, 0), + (7151, 7153, 0), + (7212, 7219, 0), + (7222, 7223, 0), + (7376, 7378, 0), + (7380, 7392, 0), + (7394, 7400, 0), + (7405, 7405, 0), + (7412, 7412, 0), + (7416, 7417, 0), + (7616, 7673, 0), + (7675, 7679, 0), + (8203, 8207, 0), + (8232, 8238, 0), + (8288, 8291, 0), + (8400, 8432, 0), + (8986, 8987, 2), + (9001, 9002, 2), + (9193, 9196, 2), + (9200, 9200, 2), + (9203, 9203, 2), + (9725, 9726, 2), + (9748, 9749, 2), + (9800, 9811, 2), + (9855, 9855, 2), + (9875, 9875, 2), + (9889, 9889, 2), + (9898, 9899, 2), + (9917, 9918, 2), + (9924, 9925, 2), + (9934, 9934, 2), + (9940, 9940, 2), + (9962, 9962, 2), + (9970, 9971, 2), + (9973, 9973, 2), + (9978, 9978, 2), + (9981, 9981, 2), + (9989, 9989, 2), + (9994, 9995, 2), + (10024, 10024, 2), + (10060, 10060, 2), + (10062, 10062, 2), + (10067, 10069, 2), + (10071, 10071, 2), + (10133, 10135, 2), + (10160, 10160, 2), + (10175, 10175, 2), + (11035, 11036, 2), + (11088, 11088, 2), + (11093, 11093, 2), + (11503, 11505, 0), + (11647, 11647, 0), + (11744, 11775, 0), + (11904, 11929, 2), + (11931, 12019, 2), + (12032, 12245, 2), + (12272, 12283, 2), + (12288, 12329, 2), + (12330, 12333, 0), + (12334, 12350, 2), + (12353, 12438, 2), + (12441, 12442, 0), + (12443, 12543, 2), + (12549, 12591, 2), + (12593, 12686, 2), + (12688, 12771, 2), + (12784, 12830, 2), + (12832, 12871, 2), + (12880, 19903, 2), + (19968, 42124, 2), + (42128, 42182, 2), + (42607, 42610, 0), + (42612, 42621, 0), + (42654, 42655, 0), + (42736, 42737, 0), + (43010, 43010, 0), + (43014, 43014, 0), + (43019, 43019, 0), + (43045, 43046, 0), + (43052, 43052, 0), + (43204, 43205, 0), + (43232, 43249, 0), + (43263, 43263, 0), + (43302, 43309, 0), + (43335, 43345, 0), + (43360, 43388, 2), + (43392, 43394, 0), + (43443, 43443, 0), + (43446, 43449, 0), + (43452, 43453, 0), + (43493, 43493, 0), + (43561, 43566, 0), + (43569, 43570, 0), + (43573, 43574, 0), + (43587, 43587, 0), + (43596, 43596, 0), + (43644, 43644, 0), + (43696, 43696, 0), + (43698, 43700, 0), + (43703, 43704, 0), + (43710, 43711, 0), + (43713, 43713, 0), + (43756, 43757, 0), + (43766, 43766, 0), + (44005, 44005, 0), + (44008, 44008, 0), + (44013, 44013, 0), + (44032, 55203, 2), + (63744, 64255, 2), + (64286, 64286, 0), + (65024, 65039, 0), + (65040, 65049, 2), + (65056, 65071, 
0), + (65072, 65106, 2), + (65108, 65126, 2), + (65128, 65131, 2), + (65281, 65376, 2), + (65504, 65510, 2), + (66045, 66045, 0), + (66272, 66272, 0), + (66422, 66426, 0), + (68097, 68099, 0), + (68101, 68102, 0), + (68108, 68111, 0), + (68152, 68154, 0), + (68159, 68159, 0), + (68325, 68326, 0), + (68900, 68903, 0), + (69291, 69292, 0), + (69446, 69456, 0), + (69633, 69633, 0), + (69688, 69702, 0), + (69759, 69761, 0), + (69811, 69814, 0), + (69817, 69818, 0), + (69888, 69890, 0), + (69927, 69931, 0), + (69933, 69940, 0), + (70003, 70003, 0), + (70016, 70017, 0), + (70070, 70078, 0), + (70089, 70092, 0), + (70095, 70095, 0), + (70191, 70193, 0), + (70196, 70196, 0), + (70198, 70199, 0), + (70206, 70206, 0), + (70367, 70367, 0), + (70371, 70378, 0), + (70400, 70401, 0), + (70459, 70460, 0), + (70464, 70464, 0), + (70502, 70508, 0), + (70512, 70516, 0), + (70712, 70719, 0), + (70722, 70724, 0), + (70726, 70726, 0), + (70750, 70750, 0), + (70835, 70840, 0), + (70842, 70842, 0), + (70847, 70848, 0), + (70850, 70851, 0), + (71090, 71093, 0), + (71100, 71101, 0), + (71103, 71104, 0), + (71132, 71133, 0), + (71219, 71226, 0), + (71229, 71229, 0), + (71231, 71232, 0), + (71339, 71339, 0), + (71341, 71341, 0), + (71344, 71349, 0), + (71351, 71351, 0), + (71453, 71455, 0), + (71458, 71461, 0), + (71463, 71467, 0), + (71727, 71735, 0), + (71737, 71738, 0), + (71995, 71996, 0), + (71998, 71998, 0), + (72003, 72003, 0), + (72148, 72151, 0), + (72154, 72155, 0), + (72160, 72160, 0), + (72193, 72202, 0), + (72243, 72248, 0), + (72251, 72254, 0), + (72263, 72263, 0), + (72273, 72278, 0), + (72281, 72283, 0), + (72330, 72342, 0), + (72344, 72345, 0), + (72752, 72758, 0), + (72760, 72765, 0), + (72767, 72767, 0), + (72850, 72871, 0), + (72874, 72880, 0), + (72882, 72883, 0), + (72885, 72886, 0), + (73009, 73014, 0), + (73018, 73018, 0), + (73020, 73021, 0), + (73023, 73029, 0), + (73031, 73031, 0), + (73104, 73105, 0), + (73109, 73109, 0), + (73111, 73111, 0), + (73459, 73460, 0), + (92912, 92916, 0), + (92976, 92982, 0), + (94031, 94031, 0), + (94095, 94098, 0), + (94176, 94179, 2), + (94180, 94180, 0), + (94192, 94193, 2), + (94208, 100343, 2), + (100352, 101589, 2), + (101632, 101640, 2), + (110592, 110878, 2), + (110928, 110930, 2), + (110948, 110951, 2), + (110960, 111355, 2), + (113821, 113822, 0), + (119143, 119145, 0), + (119163, 119170, 0), + (119173, 119179, 0), + (119210, 119213, 0), + (119362, 119364, 0), + (121344, 121398, 0), + (121403, 121452, 0), + (121461, 121461, 0), + (121476, 121476, 0), + (121499, 121503, 0), + (121505, 121519, 0), + (122880, 122886, 0), + (122888, 122904, 0), + (122907, 122913, 0), + (122915, 122916, 0), + (122918, 122922, 0), + (123184, 123190, 0), + (123628, 123631, 0), + (125136, 125142, 0), + (125252, 125258, 0), + (126980, 126980, 2), + (127183, 127183, 2), + (127374, 127374, 2), + (127377, 127386, 2), + (127488, 127490, 2), + (127504, 127547, 2), + (127552, 127560, 2), + (127568, 127569, 2), + (127584, 127589, 2), + (127744, 127776, 2), + (127789, 127797, 2), + (127799, 127868, 2), + (127870, 127891, 2), + (127904, 127946, 2), + (127951, 127955, 2), + (127968, 127984, 2), + (127988, 127988, 2), + (127992, 128062, 2), + (128064, 128064, 2), + (128066, 128252, 2), + (128255, 128317, 2), + (128331, 128334, 2), + (128336, 128359, 2), + (128378, 128378, 2), + (128405, 128406, 2), + (128420, 128420, 2), + (128507, 128591, 2), + (128640, 128709, 2), + (128716, 128716, 2), + (128720, 128722, 2), + (128725, 128727, 2), + (128747, 128748, 2), + (128756, 128764, 2), + 
(128992, 129003, 2), + (129292, 129338, 2), + (129340, 129349, 2), + (129351, 129400, 2), + (129402, 129483, 2), + (129485, 129535, 2), + (129648, 129652, 2), + (129656, 129658, 2), + (129664, 129670, 2), + (129680, 129704, 2), + (129712, 129718, 2), + (129728, 129730, 2), + (129744, 129750, 2), + (131072, 196605, 2), + (196608, 262141, 2), + (917760, 917999, 0), +] diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py new file mode 100644 index 0000000..1f2877b --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py @@ -0,0 +1,3610 @@ +EMOJI = { + "1st_place_medal": "🥇", + "2nd_place_medal": "🥈", + "3rd_place_medal": "🥉", + "ab_button_(blood_type)": "🆎", + "atm_sign": "🏧", + "a_button_(blood_type)": "🅰", + "afghanistan": "🇦🇫", + "albania": "🇦🇱", + "algeria": "🇩🇿", + "american_samoa": "🇦🇸", + "andorra": "🇦🇩", + "angola": "🇦🇴", + "anguilla": "🇦🇮", + "antarctica": "🇦🇶", + "antigua_&_barbuda": "🇦🇬", + "aquarius": "♒", + "argentina": "🇦🇷", + "aries": "♈", + "armenia": "🇦🇲", + "aruba": "🇦🇼", + "ascension_island": "🇦🇨", + "australia": "🇦🇺", + "austria": "🇦🇹", + "azerbaijan": "🇦🇿", + "back_arrow": "🔙", + "b_button_(blood_type)": "🅱", + "bahamas": "🇧🇸", + "bahrain": "🇧🇭", + "bangladesh": "🇧🇩", + "barbados": "🇧🇧", + "belarus": "🇧🇾", + "belgium": "🇧🇪", + "belize": "🇧🇿", + "benin": "🇧🇯", + "bermuda": "🇧🇲", + "bhutan": "🇧🇹", + "bolivia": "🇧🇴", + "bosnia_&_herzegovina": "🇧🇦", + "botswana": "🇧🇼", + "bouvet_island": "🇧🇻", + "brazil": "🇧🇷", + "british_indian_ocean_territory": "🇮🇴", + "british_virgin_islands": "🇻🇬", + "brunei": "🇧🇳", + "bulgaria": "🇧🇬", + "burkina_faso": "🇧🇫", + "burundi": "🇧🇮", + "cl_button": "🆑", + "cool_button": "🆒", + "cambodia": "🇰🇭", + "cameroon": "🇨🇲", + "canada": "🇨🇦", + "canary_islands": "🇮🇨", + "cancer": "♋", + "cape_verde": "🇨🇻", + "capricorn": "♑", + "caribbean_netherlands": "🇧🇶", + "cayman_islands": "🇰🇾", + "central_african_republic": "🇨🇫", + "ceuta_&_melilla": "🇪🇦", + "chad": "🇹🇩", + "chile": "🇨🇱", + "china": "🇨🇳", + "christmas_island": "🇨🇽", + "christmas_tree": "🎄", + "clipperton_island": "🇨🇵", + "cocos_(keeling)_islands": "🇨🇨", + "colombia": "🇨🇴", + "comoros": "🇰🇲", + "congo_-_brazzaville": "🇨🇬", + "congo_-_kinshasa": "🇨🇩", + "cook_islands": "🇨🇰", + "costa_rica": "🇨🇷", + "croatia": "🇭🇷", + "cuba": "🇨🇺", + "curaçao": "🇨🇼", + "cyprus": "🇨🇾", + "czechia": "🇨🇿", + "côte_d’ivoire": "🇨🇮", + "denmark": "🇩🇰", + "diego_garcia": "🇩🇬", + "djibouti": "🇩🇯", + "dominica": "🇩🇲", + "dominican_republic": "🇩🇴", + "end_arrow": "🔚", + "ecuador": "🇪🇨", + "egypt": "🇪🇬", + "el_salvador": "🇸🇻", + "england": "🏴\U000e0067\U000e0062\U000e0065\U000e006e\U000e0067\U000e007f", + "equatorial_guinea": "🇬🇶", + "eritrea": "🇪🇷", + "estonia": "🇪🇪", + "ethiopia": "🇪🇹", + "european_union": "🇪🇺", + "free_button": "🆓", + "falkland_islands": "🇫🇰", + "faroe_islands": "🇫🇴", + "fiji": "🇫🇯", + "finland": "🇫🇮", + "france": "🇫🇷", + "french_guiana": "🇬🇫", + "french_polynesia": "🇵🇫", + "french_southern_territories": "🇹🇫", + "gabon": "🇬🇦", + "gambia": "🇬🇲", + "gemini": "♊", + "georgia": "🇬🇪", + "germany": "🇩🇪", + "ghana": "🇬🇭", + "gibraltar": "🇬🇮", + "greece": "🇬🇷", + "greenland": "🇬🇱", + "grenada": "🇬🇩", + "guadeloupe": "🇬🇵", + "guam": "🇬🇺", + "guatemala": "🇬🇹", + "guernsey": "🇬🇬", + "guinea": "🇬🇳", + "guinea-bissau": "🇬🇼", + "guyana": "🇬🇾", + "haiti": "🇭🇹", + "heard_&_mcdonald_islands": "🇭🇲", + "honduras": "🇭🇳", + "hong_kong_sar_china": "🇭🇰", + "hungary": "🇭🇺", + "id_button": "🆔", + "iceland": 
"🇮🇸", + "india": "🇮🇳", + "indonesia": "🇮🇩", + "iran": "🇮🇷", + "iraq": "🇮🇶", + "ireland": "🇮🇪", + "isle_of_man": "🇮🇲", + "israel": "🇮🇱", + "italy": "🇮🇹", + "jamaica": "🇯🇲", + "japan": "🗾", + "japanese_acceptable_button": "🉑", + "japanese_application_button": "🈸", + "japanese_bargain_button": "🉐", + "japanese_castle": "🏯", + "japanese_congratulations_button": "㊗", + "japanese_discount_button": "🈹", + "japanese_dolls": "🎎", + "japanese_free_of_charge_button": "🈚", + "japanese_here_button": "🈁", + "japanese_monthly_amount_button": "🈷", + "japanese_no_vacancy_button": "🈵", + "japanese_not_free_of_charge_button": "🈶", + "japanese_open_for_business_button": "🈺", + "japanese_passing_grade_button": "🈴", + "japanese_post_office": "🏣", + "japanese_prohibited_button": "🈲", + "japanese_reserved_button": "🈯", + "japanese_secret_button": "㊙", + "japanese_service_charge_button": "🈂", + "japanese_symbol_for_beginner": "🔰", + "japanese_vacancy_button": "🈳", + "jersey": "🇯🇪", + "jordan": "🇯🇴", + "kazakhstan": "🇰🇿", + "kenya": "🇰🇪", + "kiribati": "🇰🇮", + "kosovo": "🇽🇰", + "kuwait": "🇰🇼", + "kyrgyzstan": "🇰🇬", + "laos": "🇱🇦", + "latvia": "🇱🇻", + "lebanon": "🇱🇧", + "leo": "♌", + "lesotho": "🇱🇸", + "liberia": "🇱🇷", + "libra": "♎", + "libya": "🇱🇾", + "liechtenstein": "🇱🇮", + "lithuania": "🇱🇹", + "luxembourg": "🇱🇺", + "macau_sar_china": "🇲🇴", + "macedonia": "🇲🇰", + "madagascar": "🇲🇬", + "malawi": "🇲🇼", + "malaysia": "🇲🇾", + "maldives": "🇲🇻", + "mali": "🇲🇱", + "malta": "🇲🇹", + "marshall_islands": "🇲🇭", + "martinique": "🇲🇶", + "mauritania": "🇲🇷", + "mauritius": "🇲🇺", + "mayotte": "🇾🇹", + "mexico": "🇲🇽", + "micronesia": "🇫🇲", + "moldova": "🇲🇩", + "monaco": "🇲🇨", + "mongolia": "🇲🇳", + "montenegro": "🇲🇪", + "montserrat": "🇲🇸", + "morocco": "🇲🇦", + "mozambique": "🇲🇿", + "mrs._claus": "🤶", + "mrs._claus_dark_skin_tone": "🤶🏿", + "mrs._claus_light_skin_tone": "🤶🏻", + "mrs._claus_medium-dark_skin_tone": "🤶🏾", + "mrs._claus_medium-light_skin_tone": "🤶🏼", + "mrs._claus_medium_skin_tone": "🤶🏽", + "myanmar_(burma)": "🇲🇲", + "new_button": "🆕", + "ng_button": "🆖", + "namibia": "🇳🇦", + "nauru": "🇳🇷", + "nepal": "🇳🇵", + "netherlands": "🇳🇱", + "new_caledonia": "🇳🇨", + "new_zealand": "🇳🇿", + "nicaragua": "🇳🇮", + "niger": "🇳🇪", + "nigeria": "🇳🇬", + "niue": "🇳🇺", + "norfolk_island": "🇳🇫", + "north_korea": "🇰🇵", + "northern_mariana_islands": "🇲🇵", + "norway": "🇳🇴", + "ok_button": "🆗", + "ok_hand": "👌", + "ok_hand_dark_skin_tone": "👌🏿", + "ok_hand_light_skin_tone": "👌🏻", + "ok_hand_medium-dark_skin_tone": "👌🏾", + "ok_hand_medium-light_skin_tone": "👌🏼", + "ok_hand_medium_skin_tone": "👌🏽", + "on!_arrow": "🔛", + "o_button_(blood_type)": "🅾", + "oman": "🇴🇲", + "ophiuchus": "⛎", + "p_button": "🅿", + "pakistan": "🇵🇰", + "palau": "🇵🇼", + "palestinian_territories": "🇵🇸", + "panama": "🇵🇦", + "papua_new_guinea": "🇵🇬", + "paraguay": "🇵🇾", + "peru": "🇵🇪", + "philippines": "🇵🇭", + "pisces": "♓", + "pitcairn_islands": "🇵🇳", + "poland": "🇵🇱", + "portugal": "🇵🇹", + "puerto_rico": "🇵🇷", + "qatar": "🇶🇦", + "romania": "🇷🇴", + "russia": "🇷🇺", + "rwanda": "🇷🇼", + "réunion": "🇷🇪", + "soon_arrow": "🔜", + "sos_button": "🆘", + "sagittarius": "♐", + "samoa": "🇼🇸", + "san_marino": "🇸🇲", + "santa_claus": "🎅", + "santa_claus_dark_skin_tone": "🎅🏿", + "santa_claus_light_skin_tone": "🎅🏻", + "santa_claus_medium-dark_skin_tone": "🎅🏾", + "santa_claus_medium-light_skin_tone": "🎅🏼", + "santa_claus_medium_skin_tone": "🎅🏽", + "saudi_arabia": "🇸🇦", + "scorpio": "♏", + "scotland": "🏴\U000e0067\U000e0062\U000e0073\U000e0063\U000e0074\U000e007f", + "senegal": "🇸🇳", + "serbia": 
"🇷🇸", + "seychelles": "🇸🇨", + "sierra_leone": "🇸🇱", + "singapore": "🇸🇬", + "sint_maarten": "🇸🇽", + "slovakia": "🇸🇰", + "slovenia": "🇸🇮", + "solomon_islands": "🇸🇧", + "somalia": "🇸🇴", + "south_africa": "🇿🇦", + "south_georgia_&_south_sandwich_islands": "🇬🇸", + "south_korea": "🇰🇷", + "south_sudan": "🇸🇸", + "spain": "🇪🇸", + "sri_lanka": "🇱🇰", + "st._barthélemy": "🇧🇱", + "st._helena": "🇸🇭", + "st._kitts_&_nevis": "🇰🇳", + "st._lucia": "🇱🇨", + "st._martin": "🇲🇫", + "st._pierre_&_miquelon": "🇵🇲", + "st._vincent_&_grenadines": "🇻🇨", + "statue_of_liberty": "🗽", + "sudan": "🇸🇩", + "suriname": "🇸🇷", + "svalbard_&_jan_mayen": "🇸🇯", + "swaziland": "🇸🇿", + "sweden": "🇸🇪", + "switzerland": "🇨🇭", + "syria": "🇸🇾", + "são_tomé_&_príncipe": "🇸🇹", + "t-rex": "🦖", + "top_arrow": "🔝", + "taiwan": "🇹🇼", + "tajikistan": "🇹🇯", + "tanzania": "🇹🇿", + "taurus": "♉", + "thailand": "🇹🇭", + "timor-leste": "🇹🇱", + "togo": "🇹🇬", + "tokelau": "🇹🇰", + "tokyo_tower": "🗼", + "tonga": "🇹🇴", + "trinidad_&_tobago": "🇹🇹", + "tristan_da_cunha": "🇹🇦", + "tunisia": "🇹🇳", + "turkey": "🦃", + "turkmenistan": "🇹🇲", + "turks_&_caicos_islands": "🇹🇨", + "tuvalu": "🇹🇻", + "u.s._outlying_islands": "🇺🇲", + "u.s._virgin_islands": "🇻🇮", + "up!_button": "🆙", + "uganda": "🇺🇬", + "ukraine": "🇺🇦", + "united_arab_emirates": "🇦🇪", + "united_kingdom": "🇬🇧", + "united_nations": "🇺🇳", + "united_states": "🇺🇸", + "uruguay": "🇺🇾", + "uzbekistan": "🇺🇿", + "vs_button": "🆚", + "vanuatu": "🇻🇺", + "vatican_city": "🇻🇦", + "venezuela": "🇻🇪", + "vietnam": "🇻🇳", + "virgo": "♍", + "wales": "🏴\U000e0067\U000e0062\U000e0077\U000e006c\U000e0073\U000e007f", + "wallis_&_futuna": "🇼🇫", + "western_sahara": "🇪🇭", + "yemen": "🇾🇪", + "zambia": "🇿🇲", + "zimbabwe": "🇿🇼", + "abacus": "🧮", + "adhesive_bandage": "🩹", + "admission_tickets": "🎟", + "adult": "🧑", + "adult_dark_skin_tone": "🧑🏿", + "adult_light_skin_tone": "🧑🏻", + "adult_medium-dark_skin_tone": "🧑🏾", + "adult_medium-light_skin_tone": "🧑🏼", + "adult_medium_skin_tone": "🧑🏽", + "aerial_tramway": "🚡", + "airplane": "✈", + "airplane_arrival": "🛬", + "airplane_departure": "🛫", + "alarm_clock": "⏰", + "alembic": "⚗", + "alien": "👽", + "alien_monster": "👾", + "ambulance": "🚑", + "american_football": "🏈", + "amphora": "🏺", + "anchor": "⚓", + "anger_symbol": "💢", + "angry_face": "😠", + "angry_face_with_horns": "👿", + "anguished_face": "😧", + "ant": "🐜", + "antenna_bars": "📶", + "anxious_face_with_sweat": "😰", + "articulated_lorry": "🚛", + "artist_palette": "🎨", + "astonished_face": "😲", + "atom_symbol": "⚛", + "auto_rickshaw": "🛺", + "automobile": "🚗", + "avocado": "🥑", + "axe": "🪓", + "baby": "👶", + "baby_angel": "👼", + "baby_angel_dark_skin_tone": "👼🏿", + "baby_angel_light_skin_tone": "👼🏻", + "baby_angel_medium-dark_skin_tone": "👼🏾", + "baby_angel_medium-light_skin_tone": "👼🏼", + "baby_angel_medium_skin_tone": "👼🏽", + "baby_bottle": "🍼", + "baby_chick": "🐤", + "baby_dark_skin_tone": "👶🏿", + "baby_light_skin_tone": "👶🏻", + "baby_medium-dark_skin_tone": "👶🏾", + "baby_medium-light_skin_tone": "👶🏼", + "baby_medium_skin_tone": "👶🏽", + "baby_symbol": "🚼", + "backhand_index_pointing_down": "👇", + "backhand_index_pointing_down_dark_skin_tone": "👇🏿", + "backhand_index_pointing_down_light_skin_tone": "👇🏻", + "backhand_index_pointing_down_medium-dark_skin_tone": "👇🏾", + "backhand_index_pointing_down_medium-light_skin_tone": "👇🏼", + "backhand_index_pointing_down_medium_skin_tone": "👇🏽", + "backhand_index_pointing_left": "👈", + "backhand_index_pointing_left_dark_skin_tone": "👈🏿", + "backhand_index_pointing_left_light_skin_tone": "👈🏻", + 
"backhand_index_pointing_left_medium-dark_skin_tone": "👈🏾", + "backhand_index_pointing_left_medium-light_skin_tone": "👈🏼", + "backhand_index_pointing_left_medium_skin_tone": "👈🏽", + "backhand_index_pointing_right": "👉", + "backhand_index_pointing_right_dark_skin_tone": "👉🏿", + "backhand_index_pointing_right_light_skin_tone": "👉🏻", + "backhand_index_pointing_right_medium-dark_skin_tone": "👉🏾", + "backhand_index_pointing_right_medium-light_skin_tone": "👉🏼", + "backhand_index_pointing_right_medium_skin_tone": "👉🏽", + "backhand_index_pointing_up": "👆", + "backhand_index_pointing_up_dark_skin_tone": "👆🏿", + "backhand_index_pointing_up_light_skin_tone": "👆🏻", + "backhand_index_pointing_up_medium-dark_skin_tone": "👆🏾", + "backhand_index_pointing_up_medium-light_skin_tone": "👆🏼", + "backhand_index_pointing_up_medium_skin_tone": "👆🏽", + "bacon": "🥓", + "badger": "🦡", + "badminton": "🏸", + "bagel": "🥯", + "baggage_claim": "🛄", + "baguette_bread": "🥖", + "balance_scale": "⚖", + "bald": "🦲", + "bald_man": "👨\u200d🦲", + "bald_woman": "👩\u200d🦲", + "ballet_shoes": "🩰", + "balloon": "🎈", + "ballot_box_with_ballot": "🗳", + "ballot_box_with_check": "☑", + "banana": "🍌", + "banjo": "🪕", + "bank": "🏦", + "bar_chart": "📊", + "barber_pole": "💈", + "baseball": "⚾", + "basket": "🧺", + "basketball": "🏀", + "bat": "🦇", + "bathtub": "🛁", + "battery": "🔋", + "beach_with_umbrella": "🏖", + "beaming_face_with_smiling_eyes": "😁", + "bear_face": "🐻", + "bearded_person": "🧔", + "bearded_person_dark_skin_tone": "🧔🏿", + "bearded_person_light_skin_tone": "🧔🏻", + "bearded_person_medium-dark_skin_tone": "🧔🏾", + "bearded_person_medium-light_skin_tone": "🧔🏼", + "bearded_person_medium_skin_tone": "🧔🏽", + "beating_heart": "💓", + "bed": "🛏", + "beer_mug": "🍺", + "bell": "🔔", + "bell_with_slash": "🔕", + "bellhop_bell": "🛎", + "bento_box": "🍱", + "beverage_box": "🧃", + "bicycle": "🚲", + "bikini": "👙", + "billed_cap": "🧢", + "biohazard": "☣", + "bird": "🐦", + "birthday_cake": "🎂", + "black_circle": "⚫", + "black_flag": "🏴", + "black_heart": "🖤", + "black_large_square": "⬛", + "black_medium-small_square": "◾", + "black_medium_square": "◼", + "black_nib": "✒", + "black_small_square": "▪", + "black_square_button": "🔲", + "blond-haired_man": "👱\u200d♂️", + "blond-haired_man_dark_skin_tone": "👱🏿\u200d♂️", + "blond-haired_man_light_skin_tone": "👱🏻\u200d♂️", + "blond-haired_man_medium-dark_skin_tone": "👱🏾\u200d♂️", + "blond-haired_man_medium-light_skin_tone": "👱🏼\u200d♂️", + "blond-haired_man_medium_skin_tone": "👱🏽\u200d♂️", + "blond-haired_person": "👱", + "blond-haired_person_dark_skin_tone": "👱🏿", + "blond-haired_person_light_skin_tone": "👱🏻", + "blond-haired_person_medium-dark_skin_tone": "👱🏾", + "blond-haired_person_medium-light_skin_tone": "👱🏼", + "blond-haired_person_medium_skin_tone": "👱🏽", + "blond-haired_woman": "👱\u200d♀️", + "blond-haired_woman_dark_skin_tone": "👱🏿\u200d♀️", + "blond-haired_woman_light_skin_tone": "👱🏻\u200d♀️", + "blond-haired_woman_medium-dark_skin_tone": "👱🏾\u200d♀️", + "blond-haired_woman_medium-light_skin_tone": "👱🏼\u200d♀️", + "blond-haired_woman_medium_skin_tone": "👱🏽\u200d♀️", + "blossom": "🌼", + "blowfish": "🐡", + "blue_book": "📘", + "blue_circle": "🔵", + "blue_heart": "💙", + "blue_square": "🟦", + "boar": "🐗", + "bomb": "💣", + "bone": "🦴", + "bookmark": "🔖", + "bookmark_tabs": "📑", + "books": "📚", + "bottle_with_popping_cork": "🍾", + "bouquet": "💐", + "bow_and_arrow": "🏹", + "bowl_with_spoon": "🥣", + "bowling": "🎳", + "boxing_glove": "🥊", + "boy": "👦", + "boy_dark_skin_tone": "👦🏿", + "boy_light_skin_tone": 
"👦🏻", + "boy_medium-dark_skin_tone": "👦🏾", + "boy_medium-light_skin_tone": "👦🏼", + "boy_medium_skin_tone": "👦🏽", + "brain": "🧠", + "bread": "🍞", + "breast-feeding": "🤱", + "breast-feeding_dark_skin_tone": "🤱🏿", + "breast-feeding_light_skin_tone": "🤱🏻", + "breast-feeding_medium-dark_skin_tone": "🤱🏾", + "breast-feeding_medium-light_skin_tone": "🤱🏼", + "breast-feeding_medium_skin_tone": "🤱🏽", + "brick": "🧱", + "bride_with_veil": "👰", + "bride_with_veil_dark_skin_tone": "👰🏿", + "bride_with_veil_light_skin_tone": "👰🏻", + "bride_with_veil_medium-dark_skin_tone": "👰🏾", + "bride_with_veil_medium-light_skin_tone": "👰🏼", + "bride_with_veil_medium_skin_tone": "👰🏽", + "bridge_at_night": "🌉", + "briefcase": "💼", + "briefs": "🩲", + "bright_button": "🔆", + "broccoli": "🥦", + "broken_heart": "💔", + "broom": "🧹", + "brown_circle": "🟤", + "brown_heart": "🤎", + "brown_square": "🟫", + "bug": "🐛", + "building_construction": "🏗", + "bullet_train": "🚅", + "burrito": "🌯", + "bus": "🚌", + "bus_stop": "🚏", + "bust_in_silhouette": "👤", + "busts_in_silhouette": "👥", + "butter": "🧈", + "butterfly": "🦋", + "cactus": "🌵", + "calendar": "📆", + "call_me_hand": "🤙", + "call_me_hand_dark_skin_tone": "🤙🏿", + "call_me_hand_light_skin_tone": "🤙🏻", + "call_me_hand_medium-dark_skin_tone": "🤙🏾", + "call_me_hand_medium-light_skin_tone": "🤙🏼", + "call_me_hand_medium_skin_tone": "🤙🏽", + "camel": "🐫", + "camera": "📷", + "camera_with_flash": "📸", + "camping": "🏕", + "candle": "🕯", + "candy": "🍬", + "canned_food": "🥫", + "canoe": "🛶", + "card_file_box": "🗃", + "card_index": "📇", + "card_index_dividers": "🗂", + "carousel_horse": "🎠", + "carp_streamer": "🎏", + "carrot": "🥕", + "castle": "🏰", + "cat": "🐱", + "cat_face": "🐱", + "cat_face_with_tears_of_joy": "😹", + "cat_face_with_wry_smile": "😼", + "chains": "⛓", + "chair": "🪑", + "chart_decreasing": "📉", + "chart_increasing": "📈", + "chart_increasing_with_yen": "💹", + "cheese_wedge": "🧀", + "chequered_flag": "🏁", + "cherries": "🍒", + "cherry_blossom": "🌸", + "chess_pawn": "♟", + "chestnut": "🌰", + "chicken": "🐔", + "child": "🧒", + "child_dark_skin_tone": "🧒🏿", + "child_light_skin_tone": "🧒🏻", + "child_medium-dark_skin_tone": "🧒🏾", + "child_medium-light_skin_tone": "🧒🏼", + "child_medium_skin_tone": "🧒🏽", + "children_crossing": "🚸", + "chipmunk": "🐿", + "chocolate_bar": "🍫", + "chopsticks": "🥢", + "church": "⛪", + "cigarette": "🚬", + "cinema": "🎦", + "circled_m": "Ⓜ", + "circus_tent": "🎪", + "cityscape": "🏙", + "cityscape_at_dusk": "🌆", + "clamp": "🗜", + "clapper_board": "🎬", + "clapping_hands": "👏", + "clapping_hands_dark_skin_tone": "👏🏿", + "clapping_hands_light_skin_tone": "👏🏻", + "clapping_hands_medium-dark_skin_tone": "👏🏾", + "clapping_hands_medium-light_skin_tone": "👏🏼", + "clapping_hands_medium_skin_tone": "👏🏽", + "classical_building": "🏛", + "clinking_beer_mugs": "🍻", + "clinking_glasses": "🥂", + "clipboard": "📋", + "clockwise_vertical_arrows": "🔃", + "closed_book": "📕", + "closed_mailbox_with_lowered_flag": "📪", + "closed_mailbox_with_raised_flag": "📫", + "closed_umbrella": "🌂", + "cloud": "☁", + "cloud_with_lightning": "🌩", + "cloud_with_lightning_and_rain": "⛈", + "cloud_with_rain": "🌧", + "cloud_with_snow": "🌨", + "clown_face": "🤡", + "club_suit": "♣", + "clutch_bag": "👝", + "coat": "🧥", + "cocktail_glass": "🍸", + "coconut": "🥥", + "coffin": "⚰", + "cold_face": "🥶", + "collision": "💥", + "comet": "☄", + "compass": "🧭", + "computer_disk": "💽", + "computer_mouse": "🖱", + "confetti_ball": "🎊", + "confounded_face": "😖", + "confused_face": "😕", + "construction": "🚧", + 
"construction_worker": "👷", + "construction_worker_dark_skin_tone": "👷🏿", + "construction_worker_light_skin_tone": "👷🏻", + "construction_worker_medium-dark_skin_tone": "👷🏾", + "construction_worker_medium-light_skin_tone": "👷🏼", + "construction_worker_medium_skin_tone": "👷🏽", + "control_knobs": "🎛", + "convenience_store": "🏪", + "cooked_rice": "🍚", + "cookie": "🍪", + "cooking": "🍳", + "copyright": "©", + "couch_and_lamp": "🛋", + "counterclockwise_arrows_button": "🔄", + "couple_with_heart": "💑", + "couple_with_heart_man_man": "👨\u200d❤️\u200d👨", + "couple_with_heart_woman_man": "👩\u200d❤️\u200d👨", + "couple_with_heart_woman_woman": "👩\u200d❤️\u200d👩", + "cow": "🐮", + "cow_face": "🐮", + "cowboy_hat_face": "🤠", + "crab": "🦀", + "crayon": "🖍", + "credit_card": "💳", + "crescent_moon": "🌙", + "cricket": "🦗", + "cricket_game": "🏏", + "crocodile": "🐊", + "croissant": "🥐", + "cross_mark": "❌", + "cross_mark_button": "❎", + "crossed_fingers": "🤞", + "crossed_fingers_dark_skin_tone": "🤞🏿", + "crossed_fingers_light_skin_tone": "🤞🏻", + "crossed_fingers_medium-dark_skin_tone": "🤞🏾", + "crossed_fingers_medium-light_skin_tone": "🤞🏼", + "crossed_fingers_medium_skin_tone": "🤞🏽", + "crossed_flags": "🎌", + "crossed_swords": "⚔", + "crown": "👑", + "crying_cat_face": "😿", + "crying_face": "😢", + "crystal_ball": "🔮", + "cucumber": "🥒", + "cupcake": "🧁", + "cup_with_straw": "🥤", + "curling_stone": "🥌", + "curly_hair": "🦱", + "curly-haired_man": "👨\u200d🦱", + "curly-haired_woman": "👩\u200d🦱", + "curly_loop": "➰", + "currency_exchange": "💱", + "curry_rice": "🍛", + "custard": "🍮", + "customs": "🛃", + "cut_of_meat": "🥩", + "cyclone": "🌀", + "dagger": "🗡", + "dango": "🍡", + "dashing_away": "💨", + "deaf_person": "🧏", + "deciduous_tree": "🌳", + "deer": "🦌", + "delivery_truck": "🚚", + "department_store": "🏬", + "derelict_house": "🏚", + "desert": "🏜", + "desert_island": "🏝", + "desktop_computer": "🖥", + "detective": "🕵", + "detective_dark_skin_tone": "🕵🏿", + "detective_light_skin_tone": "🕵🏻", + "detective_medium-dark_skin_tone": "🕵🏾", + "detective_medium-light_skin_tone": "🕵🏼", + "detective_medium_skin_tone": "🕵🏽", + "diamond_suit": "♦", + "diamond_with_a_dot": "💠", + "dim_button": "🔅", + "direct_hit": "🎯", + "disappointed_face": "😞", + "diving_mask": "🤿", + "diya_lamp": "🪔", + "dizzy": "💫", + "dizzy_face": "😵", + "dna": "🧬", + "dog": "🐶", + "dog_face": "🐶", + "dollar_banknote": "💵", + "dolphin": "🐬", + "door": "🚪", + "dotted_six-pointed_star": "🔯", + "double_curly_loop": "➿", + "double_exclamation_mark": "‼", + "doughnut": "🍩", + "dove": "🕊", + "down-left_arrow": "↙", + "down-right_arrow": "↘", + "down_arrow": "⬇", + "downcast_face_with_sweat": "😓", + "downwards_button": "🔽", + "dragon": "🐉", + "dragon_face": "🐲", + "dress": "👗", + "drooling_face": "🤤", + "drop_of_blood": "🩸", + "droplet": "💧", + "drum": "🥁", + "duck": "🦆", + "dumpling": "🥟", + "dvd": "📀", + "e-mail": "📧", + "eagle": "🦅", + "ear": "👂", + "ear_dark_skin_tone": "👂🏿", + "ear_light_skin_tone": "👂🏻", + "ear_medium-dark_skin_tone": "👂🏾", + "ear_medium-light_skin_tone": "👂🏼", + "ear_medium_skin_tone": "👂🏽", + "ear_of_corn": "🌽", + "ear_with_hearing_aid": "🦻", + "egg": "🍳", + "eggplant": "🍆", + "eight-pointed_star": "✴", + "eight-spoked_asterisk": "✳", + "eight-thirty": "🕣", + "eight_o’clock": "🕗", + "eject_button": "⏏", + "electric_plug": "🔌", + "elephant": "🐘", + "eleven-thirty": "🕦", + "eleven_o’clock": "🕚", + "elf": "🧝", + "elf_dark_skin_tone": "🧝🏿", + "elf_light_skin_tone": "🧝🏻", + "elf_medium-dark_skin_tone": "🧝🏾", + "elf_medium-light_skin_tone": "🧝🏼", + 
"elf_medium_skin_tone": "🧝🏽", + "envelope": "✉", + "envelope_with_arrow": "📩", + "euro_banknote": "💶", + "evergreen_tree": "🌲", + "ewe": "🐑", + "exclamation_mark": "❗", + "exclamation_question_mark": "⁉", + "exploding_head": "🤯", + "expressionless_face": "😑", + "eye": "👁", + "eye_in_speech_bubble": "👁️\u200d🗨️", + "eyes": "👀", + "face_blowing_a_kiss": "😘", + "face_savoring_food": "😋", + "face_screaming_in_fear": "😱", + "face_vomiting": "🤮", + "face_with_hand_over_mouth": "🤭", + "face_with_head-bandage": "🤕", + "face_with_medical_mask": "😷", + "face_with_monocle": "🧐", + "face_with_open_mouth": "😮", + "face_with_raised_eyebrow": "🤨", + "face_with_rolling_eyes": "🙄", + "face_with_steam_from_nose": "😤", + "face_with_symbols_on_mouth": "🤬", + "face_with_tears_of_joy": "😂", + "face_with_thermometer": "🤒", + "face_with_tongue": "😛", + "face_without_mouth": "😶", + "factory": "🏭", + "fairy": "🧚", + "fairy_dark_skin_tone": "🧚🏿", + "fairy_light_skin_tone": "🧚🏻", + "fairy_medium-dark_skin_tone": "🧚🏾", + "fairy_medium-light_skin_tone": "🧚🏼", + "fairy_medium_skin_tone": "🧚🏽", + "falafel": "🧆", + "fallen_leaf": "🍂", + "family": "👪", + "family_man_boy": "👨\u200d👦", + "family_man_boy_boy": "👨\u200d👦\u200d👦", + "family_man_girl": "👨\u200d👧", + "family_man_girl_boy": "👨\u200d👧\u200d👦", + "family_man_girl_girl": "👨\u200d👧\u200d👧", + "family_man_man_boy": "👨\u200d👨\u200d👦", + "family_man_man_boy_boy": "👨\u200d👨\u200d👦\u200d👦", + "family_man_man_girl": "👨\u200d👨\u200d👧", + "family_man_man_girl_boy": "👨\u200d👨\u200d👧\u200d👦", + "family_man_man_girl_girl": "👨\u200d👨\u200d👧\u200d👧", + "family_man_woman_boy": "👨\u200d👩\u200d👦", + "family_man_woman_boy_boy": "👨\u200d👩\u200d👦\u200d👦", + "family_man_woman_girl": "👨\u200d👩\u200d👧", + "family_man_woman_girl_boy": "👨\u200d👩\u200d👧\u200d👦", + "family_man_woman_girl_girl": "👨\u200d👩\u200d👧\u200d👧", + "family_woman_boy": "👩\u200d👦", + "family_woman_boy_boy": "👩\u200d👦\u200d👦", + "family_woman_girl": "👩\u200d👧", + "family_woman_girl_boy": "👩\u200d👧\u200d👦", + "family_woman_girl_girl": "👩\u200d👧\u200d👧", + "family_woman_woman_boy": "👩\u200d👩\u200d👦", + "family_woman_woman_boy_boy": "👩\u200d👩\u200d👦\u200d👦", + "family_woman_woman_girl": "👩\u200d👩\u200d👧", + "family_woman_woman_girl_boy": "👩\u200d👩\u200d👧\u200d👦", + "family_woman_woman_girl_girl": "👩\u200d👩\u200d👧\u200d👧", + "fast-forward_button": "⏩", + "fast_down_button": "⏬", + "fast_reverse_button": "⏪", + "fast_up_button": "⏫", + "fax_machine": "📠", + "fearful_face": "😨", + "female_sign": "♀", + "ferris_wheel": "🎡", + "ferry": "⛴", + "field_hockey": "🏑", + "file_cabinet": "🗄", + "file_folder": "📁", + "film_frames": "🎞", + "film_projector": "📽", + "fire": "🔥", + "fire_extinguisher": "🧯", + "firecracker": "🧨", + "fire_engine": "🚒", + "fireworks": "🎆", + "first_quarter_moon": "🌓", + "first_quarter_moon_face": "🌛", + "fish": "🐟", + "fish_cake_with_swirl": "🍥", + "fishing_pole": "🎣", + "five-thirty": "🕠", + "five_o’clock": "🕔", + "flag_in_hole": "⛳", + "flamingo": "🦩", + "flashlight": "🔦", + "flat_shoe": "🥿", + "fleur-de-lis": "⚜", + "flexed_biceps": "💪", + "flexed_biceps_dark_skin_tone": "💪🏿", + "flexed_biceps_light_skin_tone": "💪🏻", + "flexed_biceps_medium-dark_skin_tone": "💪🏾", + "flexed_biceps_medium-light_skin_tone": "💪🏼", + "flexed_biceps_medium_skin_tone": "💪🏽", + "floppy_disk": "💾", + "flower_playing_cards": "🎴", + "flushed_face": "😳", + "flying_disc": "🥏", + "flying_saucer": "🛸", + "fog": "🌫", + "foggy": "🌁", + "folded_hands": "🙏", + "folded_hands_dark_skin_tone": "🙏🏿", + "folded_hands_light_skin_tone": "🙏🏻", + 
"folded_hands_medium-dark_skin_tone": "🙏🏾", + "folded_hands_medium-light_skin_tone": "🙏🏼", + "folded_hands_medium_skin_tone": "🙏🏽", + "foot": "🦶", + "footprints": "👣", + "fork_and_knife": "🍴", + "fork_and_knife_with_plate": "🍽", + "fortune_cookie": "🥠", + "fountain": "⛲", + "fountain_pen": "🖋", + "four-thirty": "🕟", + "four_leaf_clover": "🍀", + "four_o’clock": "🕓", + "fox_face": "🦊", + "framed_picture": "🖼", + "french_fries": "🍟", + "fried_shrimp": "🍤", + "frog_face": "🐸", + "front-facing_baby_chick": "🐥", + "frowning_face": "☹", + "frowning_face_with_open_mouth": "😦", + "fuel_pump": "⛽", + "full_moon": "🌕", + "full_moon_face": "🌝", + "funeral_urn": "⚱", + "game_die": "🎲", + "garlic": "🧄", + "gear": "⚙", + "gem_stone": "💎", + "genie": "🧞", + "ghost": "👻", + "giraffe": "🦒", + "girl": "👧", + "girl_dark_skin_tone": "👧🏿", + "girl_light_skin_tone": "👧🏻", + "girl_medium-dark_skin_tone": "👧🏾", + "girl_medium-light_skin_tone": "👧🏼", + "girl_medium_skin_tone": "👧🏽", + "glass_of_milk": "🥛", + "glasses": "👓", + "globe_showing_americas": "🌎", + "globe_showing_asia-australia": "🌏", + "globe_showing_europe-africa": "🌍", + "globe_with_meridians": "🌐", + "gloves": "🧤", + "glowing_star": "🌟", + "goal_net": "🥅", + "goat": "🐐", + "goblin": "👺", + "goggles": "🥽", + "gorilla": "🦍", + "graduation_cap": "🎓", + "grapes": "🍇", + "green_apple": "🍏", + "green_book": "📗", + "green_circle": "🟢", + "green_heart": "💚", + "green_salad": "🥗", + "green_square": "🟩", + "grimacing_face": "😬", + "grinning_cat_face": "😺", + "grinning_cat_face_with_smiling_eyes": "😸", + "grinning_face": "😀", + "grinning_face_with_big_eyes": "😃", + "grinning_face_with_smiling_eyes": "😄", + "grinning_face_with_sweat": "😅", + "grinning_squinting_face": "😆", + "growing_heart": "💗", + "guard": "💂", + "guard_dark_skin_tone": "💂🏿", + "guard_light_skin_tone": "💂🏻", + "guard_medium-dark_skin_tone": "💂🏾", + "guard_medium-light_skin_tone": "💂🏼", + "guard_medium_skin_tone": "💂🏽", + "guide_dog": "🦮", + "guitar": "🎸", + "hamburger": "🍔", + "hammer": "🔨", + "hammer_and_pick": "⚒", + "hammer_and_wrench": "🛠", + "hamster_face": "🐹", + "hand_with_fingers_splayed": "🖐", + "hand_with_fingers_splayed_dark_skin_tone": "🖐🏿", + "hand_with_fingers_splayed_light_skin_tone": "🖐🏻", + "hand_with_fingers_splayed_medium-dark_skin_tone": "🖐🏾", + "hand_with_fingers_splayed_medium-light_skin_tone": "🖐🏼", + "hand_with_fingers_splayed_medium_skin_tone": "🖐🏽", + "handbag": "👜", + "handshake": "🤝", + "hatching_chick": "🐣", + "headphone": "🎧", + "hear-no-evil_monkey": "🙉", + "heart_decoration": "💟", + "heart_suit": "♥", + "heart_with_arrow": "💘", + "heart_with_ribbon": "💝", + "heavy_check_mark": "✔", + "heavy_division_sign": "➗", + "heavy_dollar_sign": "💲", + "heavy_heart_exclamation": "❣", + "heavy_large_circle": "⭕", + "heavy_minus_sign": "➖", + "heavy_multiplication_x": "✖", + "heavy_plus_sign": "➕", + "hedgehog": "🦔", + "helicopter": "🚁", + "herb": "🌿", + "hibiscus": "🌺", + "high-heeled_shoe": "👠", + "high-speed_train": "🚄", + "high_voltage": "⚡", + "hiking_boot": "🥾", + "hindu_temple": "🛕", + "hippopotamus": "🦛", + "hole": "🕳", + "honey_pot": "🍯", + "honeybee": "🐝", + "horizontal_traffic_light": "🚥", + "horse": "🐴", + "horse_face": "🐴", + "horse_racing": "🏇", + "horse_racing_dark_skin_tone": "🏇🏿", + "horse_racing_light_skin_tone": "🏇🏻", + "horse_racing_medium-dark_skin_tone": "🏇🏾", + "horse_racing_medium-light_skin_tone": "🏇🏼", + "horse_racing_medium_skin_tone": "🏇🏽", + "hospital": "🏥", + "hot_beverage": "☕", + "hot_dog": "🌭", + "hot_face": "🥵", + "hot_pepper": "🌶", + 
"hot_springs": "♨", + "hotel": "🏨", + "hourglass_done": "⌛", + "hourglass_not_done": "⏳", + "house": "🏠", + "house_with_garden": "🏡", + "houses": "🏘", + "hugging_face": "🤗", + "hundred_points": "💯", + "hushed_face": "😯", + "ice": "🧊", + "ice_cream": "🍨", + "ice_hockey": "🏒", + "ice_skate": "⛸", + "inbox_tray": "📥", + "incoming_envelope": "📨", + "index_pointing_up": "☝", + "index_pointing_up_dark_skin_tone": "☝🏿", + "index_pointing_up_light_skin_tone": "☝🏻", + "index_pointing_up_medium-dark_skin_tone": "☝🏾", + "index_pointing_up_medium-light_skin_tone": "☝🏼", + "index_pointing_up_medium_skin_tone": "☝🏽", + "infinity": "♾", + "information": "ℹ", + "input_latin_letters": "🔤", + "input_latin_lowercase": "🔡", + "input_latin_uppercase": "🔠", + "input_numbers": "🔢", + "input_symbols": "🔣", + "jack-o-lantern": "🎃", + "jeans": "👖", + "jigsaw": "🧩", + "joker": "🃏", + "joystick": "🕹", + "kaaba": "🕋", + "kangaroo": "🦘", + "key": "🔑", + "keyboard": "⌨", + "keycap_#": "#️⃣", + "keycap_*": "*️⃣", + "keycap_0": "0️⃣", + "keycap_1": "1️⃣", + "keycap_10": "🔟", + "keycap_2": "2️⃣", + "keycap_3": "3️⃣", + "keycap_4": "4️⃣", + "keycap_5": "5️⃣", + "keycap_6": "6️⃣", + "keycap_7": "7️⃣", + "keycap_8": "8️⃣", + "keycap_9": "9️⃣", + "kick_scooter": "🛴", + "kimono": "👘", + "kiss": "💋", + "kiss_man_man": "👨\u200d❤️\u200d💋\u200d👨", + "kiss_mark": "💋", + "kiss_woman_man": "👩\u200d❤️\u200d💋\u200d👨", + "kiss_woman_woman": "👩\u200d❤️\u200d💋\u200d👩", + "kissing_cat_face": "😽", + "kissing_face": "😗", + "kissing_face_with_closed_eyes": "😚", + "kissing_face_with_smiling_eyes": "😙", + "kitchen_knife": "🔪", + "kite": "🪁", + "kiwi_fruit": "🥝", + "koala": "🐨", + "lab_coat": "🥼", + "label": "🏷", + "lacrosse": "🥍", + "lady_beetle": "🐞", + "laptop_computer": "💻", + "large_blue_diamond": "🔷", + "large_orange_diamond": "🔶", + "last_quarter_moon": "🌗", + "last_quarter_moon_face": "🌜", + "last_track_button": "⏮", + "latin_cross": "✝", + "leaf_fluttering_in_wind": "🍃", + "leafy_green": "🥬", + "ledger": "📒", + "left-facing_fist": "🤛", + "left-facing_fist_dark_skin_tone": "🤛🏿", + "left-facing_fist_light_skin_tone": "🤛🏻", + "left-facing_fist_medium-dark_skin_tone": "🤛🏾", + "left-facing_fist_medium-light_skin_tone": "🤛🏼", + "left-facing_fist_medium_skin_tone": "🤛🏽", + "left-right_arrow": "↔", + "left_arrow": "⬅", + "left_arrow_curving_right": "↪", + "left_luggage": "🛅", + "left_speech_bubble": "🗨", + "leg": "🦵", + "lemon": "🍋", + "leopard": "🐆", + "level_slider": "🎚", + "light_bulb": "💡", + "light_rail": "🚈", + "link": "🔗", + "linked_paperclips": "🖇", + "lion_face": "🦁", + "lipstick": "💄", + "litter_in_bin_sign": "🚮", + "lizard": "🦎", + "llama": "🦙", + "lobster": "🦞", + "locked": "🔒", + "locked_with_key": "🔐", + "locked_with_pen": "🔏", + "locomotive": "🚂", + "lollipop": "🍭", + "lotion_bottle": "🧴", + "loudly_crying_face": "😭", + "loudspeaker": "📢", + "love-you_gesture": "🤟", + "love-you_gesture_dark_skin_tone": "🤟🏿", + "love-you_gesture_light_skin_tone": "🤟🏻", + "love-you_gesture_medium-dark_skin_tone": "🤟🏾", + "love-you_gesture_medium-light_skin_tone": "🤟🏼", + "love-you_gesture_medium_skin_tone": "🤟🏽", + "love_hotel": "🏩", + "love_letter": "💌", + "luggage": "🧳", + "lying_face": "🤥", + "mage": "🧙", + "mage_dark_skin_tone": "🧙🏿", + "mage_light_skin_tone": "🧙🏻", + "mage_medium-dark_skin_tone": "🧙🏾", + "mage_medium-light_skin_tone": "🧙🏼", + "mage_medium_skin_tone": "🧙🏽", + "magnet": "🧲", + "magnifying_glass_tilted_left": "🔍", + "magnifying_glass_tilted_right": "🔎", + "mahjong_red_dragon": "🀄", + "male_sign": "♂", + "man": "👨", + 
"man_and_woman_holding_hands": "👫", + "man_artist": "👨\u200d🎨", + "man_artist_dark_skin_tone": "👨🏿\u200d🎨", + "man_artist_light_skin_tone": "👨🏻\u200d🎨", + "man_artist_medium-dark_skin_tone": "👨🏾\u200d🎨", + "man_artist_medium-light_skin_tone": "👨🏼\u200d🎨", + "man_artist_medium_skin_tone": "👨🏽\u200d🎨", + "man_astronaut": "👨\u200d🚀", + "man_astronaut_dark_skin_tone": "👨🏿\u200d🚀", + "man_astronaut_light_skin_tone": "👨🏻\u200d🚀", + "man_astronaut_medium-dark_skin_tone": "👨🏾\u200d🚀", + "man_astronaut_medium-light_skin_tone": "👨🏼\u200d🚀", + "man_astronaut_medium_skin_tone": "👨🏽\u200d🚀", + "man_biking": "🚴\u200d♂️", + "man_biking_dark_skin_tone": "🚴🏿\u200d♂️", + "man_biking_light_skin_tone": "🚴🏻\u200d♂️", + "man_biking_medium-dark_skin_tone": "🚴🏾\u200d♂️", + "man_biking_medium-light_skin_tone": "🚴🏼\u200d♂️", + "man_biking_medium_skin_tone": "🚴🏽\u200d♂️", + "man_bouncing_ball": "⛹️\u200d♂️", + "man_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♂️", + "man_bouncing_ball_light_skin_tone": "⛹🏻\u200d♂️", + "man_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♂️", + "man_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♂️", + "man_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♂️", + "man_bowing": "🙇\u200d♂️", + "man_bowing_dark_skin_tone": "🙇🏿\u200d♂️", + "man_bowing_light_skin_tone": "🙇🏻\u200d♂️", + "man_bowing_medium-dark_skin_tone": "🙇🏾\u200d♂️", + "man_bowing_medium-light_skin_tone": "🙇🏼\u200d♂️", + "man_bowing_medium_skin_tone": "🙇🏽\u200d♂️", + "man_cartwheeling": "🤸\u200d♂️", + "man_cartwheeling_dark_skin_tone": "🤸🏿\u200d♂️", + "man_cartwheeling_light_skin_tone": "🤸🏻\u200d♂️", + "man_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♂️", + "man_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♂️", + "man_cartwheeling_medium_skin_tone": "🤸🏽\u200d♂️", + "man_climbing": "🧗\u200d♂️", + "man_climbing_dark_skin_tone": "🧗🏿\u200d♂️", + "man_climbing_light_skin_tone": "🧗🏻\u200d♂️", + "man_climbing_medium-dark_skin_tone": "🧗🏾\u200d♂️", + "man_climbing_medium-light_skin_tone": "🧗🏼\u200d♂️", + "man_climbing_medium_skin_tone": "🧗🏽\u200d♂️", + "man_construction_worker": "👷\u200d♂️", + "man_construction_worker_dark_skin_tone": "👷🏿\u200d♂️", + "man_construction_worker_light_skin_tone": "👷🏻\u200d♂️", + "man_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♂️", + "man_construction_worker_medium-light_skin_tone": "👷🏼\u200d♂️", + "man_construction_worker_medium_skin_tone": "👷🏽\u200d♂️", + "man_cook": "👨\u200d🍳", + "man_cook_dark_skin_tone": "👨🏿\u200d🍳", + "man_cook_light_skin_tone": "👨🏻\u200d🍳", + "man_cook_medium-dark_skin_tone": "👨🏾\u200d🍳", + "man_cook_medium-light_skin_tone": "👨🏼\u200d🍳", + "man_cook_medium_skin_tone": "👨🏽\u200d🍳", + "man_dancing": "🕺", + "man_dancing_dark_skin_tone": "🕺🏿", + "man_dancing_light_skin_tone": "🕺🏻", + "man_dancing_medium-dark_skin_tone": "🕺🏾", + "man_dancing_medium-light_skin_tone": "🕺🏼", + "man_dancing_medium_skin_tone": "🕺🏽", + "man_dark_skin_tone": "👨🏿", + "man_detective": "🕵️\u200d♂️", + "man_detective_dark_skin_tone": "🕵🏿\u200d♂️", + "man_detective_light_skin_tone": "🕵🏻\u200d♂️", + "man_detective_medium-dark_skin_tone": "🕵🏾\u200d♂️", + "man_detective_medium-light_skin_tone": "🕵🏼\u200d♂️", + "man_detective_medium_skin_tone": "🕵🏽\u200d♂️", + "man_elf": "🧝\u200d♂️", + "man_elf_dark_skin_tone": "🧝🏿\u200d♂️", + "man_elf_light_skin_tone": "🧝🏻\u200d♂️", + "man_elf_medium-dark_skin_tone": "🧝🏾\u200d♂️", + "man_elf_medium-light_skin_tone": "🧝🏼\u200d♂️", + "man_elf_medium_skin_tone": "🧝🏽\u200d♂️", + "man_facepalming": "🤦\u200d♂️", + "man_facepalming_dark_skin_tone": "🤦🏿\u200d♂️", + 
"man_facepalming_light_skin_tone": "🤦🏻\u200d♂️", + "man_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♂️", + "man_facepalming_medium-light_skin_tone": "🤦🏼\u200d♂️", + "man_facepalming_medium_skin_tone": "🤦🏽\u200d♂️", + "man_factory_worker": "👨\u200d🏭", + "man_factory_worker_dark_skin_tone": "👨🏿\u200d🏭", + "man_factory_worker_light_skin_tone": "👨🏻\u200d🏭", + "man_factory_worker_medium-dark_skin_tone": "👨🏾\u200d🏭", + "man_factory_worker_medium-light_skin_tone": "👨🏼\u200d🏭", + "man_factory_worker_medium_skin_tone": "👨🏽\u200d🏭", + "man_fairy": "🧚\u200d♂️", + "man_fairy_dark_skin_tone": "🧚🏿\u200d♂️", + "man_fairy_light_skin_tone": "🧚🏻\u200d♂️", + "man_fairy_medium-dark_skin_tone": "🧚🏾\u200d♂️", + "man_fairy_medium-light_skin_tone": "🧚🏼\u200d♂️", + "man_fairy_medium_skin_tone": "🧚🏽\u200d♂️", + "man_farmer": "👨\u200d🌾", + "man_farmer_dark_skin_tone": "👨🏿\u200d🌾", + "man_farmer_light_skin_tone": "👨🏻\u200d🌾", + "man_farmer_medium-dark_skin_tone": "👨🏾\u200d🌾", + "man_farmer_medium-light_skin_tone": "👨🏼\u200d🌾", + "man_farmer_medium_skin_tone": "👨🏽\u200d🌾", + "man_firefighter": "👨\u200d🚒", + "man_firefighter_dark_skin_tone": "👨🏿\u200d🚒", + "man_firefighter_light_skin_tone": "👨🏻\u200d🚒", + "man_firefighter_medium-dark_skin_tone": "👨🏾\u200d🚒", + "man_firefighter_medium-light_skin_tone": "👨🏼\u200d🚒", + "man_firefighter_medium_skin_tone": "👨🏽\u200d🚒", + "man_frowning": "🙍\u200d♂️", + "man_frowning_dark_skin_tone": "🙍🏿\u200d♂️", + "man_frowning_light_skin_tone": "🙍🏻\u200d♂️", + "man_frowning_medium-dark_skin_tone": "🙍🏾\u200d♂️", + "man_frowning_medium-light_skin_tone": "🙍🏼\u200d♂️", + "man_frowning_medium_skin_tone": "🙍🏽\u200d♂️", + "man_genie": "🧞\u200d♂️", + "man_gesturing_no": "🙅\u200d♂️", + "man_gesturing_no_dark_skin_tone": "🙅🏿\u200d♂️", + "man_gesturing_no_light_skin_tone": "🙅🏻\u200d♂️", + "man_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♂️", + "man_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♂️", + "man_gesturing_no_medium_skin_tone": "🙅🏽\u200d♂️", + "man_gesturing_ok": "🙆\u200d♂️", + "man_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♂️", + "man_gesturing_ok_light_skin_tone": "🙆🏻\u200d♂️", + "man_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♂️", + "man_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♂️", + "man_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♂️", + "man_getting_haircut": "💇\u200d♂️", + "man_getting_haircut_dark_skin_tone": "💇🏿\u200d♂️", + "man_getting_haircut_light_skin_tone": "💇🏻\u200d♂️", + "man_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♂️", + "man_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♂️", + "man_getting_haircut_medium_skin_tone": "💇🏽\u200d♂️", + "man_getting_massage": "💆\u200d♂️", + "man_getting_massage_dark_skin_tone": "💆🏿\u200d♂️", + "man_getting_massage_light_skin_tone": "💆🏻\u200d♂️", + "man_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♂️", + "man_getting_massage_medium-light_skin_tone": "💆🏼\u200d♂️", + "man_getting_massage_medium_skin_tone": "💆🏽\u200d♂️", + "man_golfing": "🏌️\u200d♂️", + "man_golfing_dark_skin_tone": "🏌🏿\u200d♂️", + "man_golfing_light_skin_tone": "🏌🏻\u200d♂️", + "man_golfing_medium-dark_skin_tone": "🏌🏾\u200d♂️", + "man_golfing_medium-light_skin_tone": "🏌🏼\u200d♂️", + "man_golfing_medium_skin_tone": "🏌🏽\u200d♂️", + "man_guard": "💂\u200d♂️", + "man_guard_dark_skin_tone": "💂🏿\u200d♂️", + "man_guard_light_skin_tone": "💂🏻\u200d♂️", + "man_guard_medium-dark_skin_tone": "💂🏾\u200d♂️", + "man_guard_medium-light_skin_tone": "💂🏼\u200d♂️", + "man_guard_medium_skin_tone": "💂🏽\u200d♂️", + "man_health_worker": "👨\u200d⚕️", + 
"man_health_worker_dark_skin_tone": "👨🏿\u200d⚕️", + "man_health_worker_light_skin_tone": "👨🏻\u200d⚕️", + "man_health_worker_medium-dark_skin_tone": "👨🏾\u200d⚕️", + "man_health_worker_medium-light_skin_tone": "👨🏼\u200d⚕️", + "man_health_worker_medium_skin_tone": "👨🏽\u200d⚕️", + "man_in_lotus_position": "🧘\u200d♂️", + "man_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♂️", + "man_in_lotus_position_light_skin_tone": "🧘🏻\u200d♂️", + "man_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♂️", + "man_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♂️", + "man_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♂️", + "man_in_manual_wheelchair": "👨\u200d🦽", + "man_in_motorized_wheelchair": "👨\u200d🦼", + "man_in_steamy_room": "🧖\u200d♂️", + "man_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♂️", + "man_in_steamy_room_light_skin_tone": "🧖🏻\u200d♂️", + "man_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♂️", + "man_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♂️", + "man_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♂️", + "man_in_suit_levitating": "🕴", + "man_in_suit_levitating_dark_skin_tone": "🕴🏿", + "man_in_suit_levitating_light_skin_tone": "🕴🏻", + "man_in_suit_levitating_medium-dark_skin_tone": "🕴🏾", + "man_in_suit_levitating_medium-light_skin_tone": "🕴🏼", + "man_in_suit_levitating_medium_skin_tone": "🕴🏽", + "man_in_tuxedo": "🤵", + "man_in_tuxedo_dark_skin_tone": "🤵🏿", + "man_in_tuxedo_light_skin_tone": "🤵🏻", + "man_in_tuxedo_medium-dark_skin_tone": "🤵🏾", + "man_in_tuxedo_medium-light_skin_tone": "🤵🏼", + "man_in_tuxedo_medium_skin_tone": "🤵🏽", + "man_judge": "👨\u200d⚖️", + "man_judge_dark_skin_tone": "👨🏿\u200d⚖️", + "man_judge_light_skin_tone": "👨🏻\u200d⚖️", + "man_judge_medium-dark_skin_tone": "👨🏾\u200d⚖️", + "man_judge_medium-light_skin_tone": "👨🏼\u200d⚖️", + "man_judge_medium_skin_tone": "👨🏽\u200d⚖️", + "man_juggling": "🤹\u200d♂️", + "man_juggling_dark_skin_tone": "🤹🏿\u200d♂️", + "man_juggling_light_skin_tone": "🤹🏻\u200d♂️", + "man_juggling_medium-dark_skin_tone": "🤹🏾\u200d♂️", + "man_juggling_medium-light_skin_tone": "🤹🏼\u200d♂️", + "man_juggling_medium_skin_tone": "🤹🏽\u200d♂️", + "man_lifting_weights": "🏋️\u200d♂️", + "man_lifting_weights_dark_skin_tone": "🏋🏿\u200d♂️", + "man_lifting_weights_light_skin_tone": "🏋🏻\u200d♂️", + "man_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♂️", + "man_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♂️", + "man_lifting_weights_medium_skin_tone": "🏋🏽\u200d♂️", + "man_light_skin_tone": "👨🏻", + "man_mage": "🧙\u200d♂️", + "man_mage_dark_skin_tone": "🧙🏿\u200d♂️", + "man_mage_light_skin_tone": "🧙🏻\u200d♂️", + "man_mage_medium-dark_skin_tone": "🧙🏾\u200d♂️", + "man_mage_medium-light_skin_tone": "🧙🏼\u200d♂️", + "man_mage_medium_skin_tone": "🧙🏽\u200d♂️", + "man_mechanic": "👨\u200d🔧", + "man_mechanic_dark_skin_tone": "👨🏿\u200d🔧", + "man_mechanic_light_skin_tone": "👨🏻\u200d🔧", + "man_mechanic_medium-dark_skin_tone": "👨🏾\u200d🔧", + "man_mechanic_medium-light_skin_tone": "👨🏼\u200d🔧", + "man_mechanic_medium_skin_tone": "👨🏽\u200d🔧", + "man_medium-dark_skin_tone": "👨🏾", + "man_medium-light_skin_tone": "👨🏼", + "man_medium_skin_tone": "👨🏽", + "man_mountain_biking": "🚵\u200d♂️", + "man_mountain_biking_dark_skin_tone": "🚵🏿\u200d♂️", + "man_mountain_biking_light_skin_tone": "🚵🏻\u200d♂️", + "man_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♂️", + "man_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♂️", + "man_mountain_biking_medium_skin_tone": "🚵🏽\u200d♂️", + "man_office_worker": "👨\u200d💼", + "man_office_worker_dark_skin_tone": "👨🏿\u200d💼", + 
"man_office_worker_light_skin_tone": "👨🏻\u200d💼", + "man_office_worker_medium-dark_skin_tone": "👨🏾\u200d💼", + "man_office_worker_medium-light_skin_tone": "👨🏼\u200d💼", + "man_office_worker_medium_skin_tone": "👨🏽\u200d💼", + "man_pilot": "👨\u200d✈️", + "man_pilot_dark_skin_tone": "👨🏿\u200d✈️", + "man_pilot_light_skin_tone": "👨🏻\u200d✈️", + "man_pilot_medium-dark_skin_tone": "👨🏾\u200d✈️", + "man_pilot_medium-light_skin_tone": "👨🏼\u200d✈️", + "man_pilot_medium_skin_tone": "👨🏽\u200d✈️", + "man_playing_handball": "🤾\u200d♂️", + "man_playing_handball_dark_skin_tone": "🤾🏿\u200d♂️", + "man_playing_handball_light_skin_tone": "🤾🏻\u200d♂️", + "man_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♂️", + "man_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♂️", + "man_playing_handball_medium_skin_tone": "🤾🏽\u200d♂️", + "man_playing_water_polo": "🤽\u200d♂️", + "man_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♂️", + "man_playing_water_polo_light_skin_tone": "🤽🏻\u200d♂️", + "man_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♂️", + "man_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♂️", + "man_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♂️", + "man_police_officer": "👮\u200d♂️", + "man_police_officer_dark_skin_tone": "👮🏿\u200d♂️", + "man_police_officer_light_skin_tone": "👮🏻\u200d♂️", + "man_police_officer_medium-dark_skin_tone": "👮🏾\u200d♂️", + "man_police_officer_medium-light_skin_tone": "👮🏼\u200d♂️", + "man_police_officer_medium_skin_tone": "👮🏽\u200d♂️", + "man_pouting": "🙎\u200d♂️", + "man_pouting_dark_skin_tone": "🙎🏿\u200d♂️", + "man_pouting_light_skin_tone": "🙎🏻\u200d♂️", + "man_pouting_medium-dark_skin_tone": "🙎🏾\u200d♂️", + "man_pouting_medium-light_skin_tone": "🙎🏼\u200d♂️", + "man_pouting_medium_skin_tone": "🙎🏽\u200d♂️", + "man_raising_hand": "🙋\u200d♂️", + "man_raising_hand_dark_skin_tone": "🙋🏿\u200d♂️", + "man_raising_hand_light_skin_tone": "🙋🏻\u200d♂️", + "man_raising_hand_medium-dark_skin_tone": "🙋🏾\u200d♂️", + "man_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♂️", + "man_raising_hand_medium_skin_tone": "🙋🏽\u200d♂️", + "man_rowing_boat": "🚣\u200d♂️", + "man_rowing_boat_dark_skin_tone": "🚣🏿\u200d♂️", + "man_rowing_boat_light_skin_tone": "🚣🏻\u200d♂️", + "man_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♂️", + "man_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♂️", + "man_rowing_boat_medium_skin_tone": "🚣🏽\u200d♂️", + "man_running": "🏃\u200d♂️", + "man_running_dark_skin_tone": "🏃🏿\u200d♂️", + "man_running_light_skin_tone": "🏃🏻\u200d♂️", + "man_running_medium-dark_skin_tone": "🏃🏾\u200d♂️", + "man_running_medium-light_skin_tone": "🏃🏼\u200d♂️", + "man_running_medium_skin_tone": "🏃🏽\u200d♂️", + "man_scientist": "👨\u200d🔬", + "man_scientist_dark_skin_tone": "👨🏿\u200d🔬", + "man_scientist_light_skin_tone": "👨🏻\u200d🔬", + "man_scientist_medium-dark_skin_tone": "👨🏾\u200d🔬", + "man_scientist_medium-light_skin_tone": "👨🏼\u200d🔬", + "man_scientist_medium_skin_tone": "👨🏽\u200d🔬", + "man_shrugging": "🤷\u200d♂️", + "man_shrugging_dark_skin_tone": "🤷🏿\u200d♂️", + "man_shrugging_light_skin_tone": "🤷🏻\u200d♂️", + "man_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♂️", + "man_shrugging_medium-light_skin_tone": "🤷🏼\u200d♂️", + "man_shrugging_medium_skin_tone": "🤷🏽\u200d♂️", + "man_singer": "👨\u200d🎤", + "man_singer_dark_skin_tone": "👨🏿\u200d🎤", + "man_singer_light_skin_tone": "👨🏻\u200d🎤", + "man_singer_medium-dark_skin_tone": "👨🏾\u200d🎤", + "man_singer_medium-light_skin_tone": "👨🏼\u200d🎤", + "man_singer_medium_skin_tone": "👨🏽\u200d🎤", + "man_student": "👨\u200d🎓", + "man_student_dark_skin_tone": 
"👨🏿\u200d🎓", + "man_student_light_skin_tone": "👨🏻\u200d🎓", + "man_student_medium-dark_skin_tone": "👨🏾\u200d🎓", + "man_student_medium-light_skin_tone": "👨🏼\u200d🎓", + "man_student_medium_skin_tone": "👨🏽\u200d🎓", + "man_surfing": "🏄\u200d♂️", + "man_surfing_dark_skin_tone": "🏄🏿\u200d♂️", + "man_surfing_light_skin_tone": "🏄🏻\u200d♂️", + "man_surfing_medium-dark_skin_tone": "🏄🏾\u200d♂️", + "man_surfing_medium-light_skin_tone": "🏄🏼\u200d♂️", + "man_surfing_medium_skin_tone": "🏄🏽\u200d♂️", + "man_swimming": "🏊\u200d♂️", + "man_swimming_dark_skin_tone": "🏊🏿\u200d♂️", + "man_swimming_light_skin_tone": "🏊🏻\u200d♂️", + "man_swimming_medium-dark_skin_tone": "🏊🏾\u200d♂️", + "man_swimming_medium-light_skin_tone": "🏊🏼\u200d♂️", + "man_swimming_medium_skin_tone": "🏊🏽\u200d♂️", + "man_teacher": "👨\u200d🏫", + "man_teacher_dark_skin_tone": "👨🏿\u200d🏫", + "man_teacher_light_skin_tone": "👨🏻\u200d🏫", + "man_teacher_medium-dark_skin_tone": "👨🏾\u200d🏫", + "man_teacher_medium-light_skin_tone": "👨🏼\u200d🏫", + "man_teacher_medium_skin_tone": "👨🏽\u200d🏫", + "man_technologist": "👨\u200d💻", + "man_technologist_dark_skin_tone": "👨🏿\u200d💻", + "man_technologist_light_skin_tone": "👨🏻\u200d💻", + "man_technologist_medium-dark_skin_tone": "👨🏾\u200d💻", + "man_technologist_medium-light_skin_tone": "👨🏼\u200d💻", + "man_technologist_medium_skin_tone": "👨🏽\u200d💻", + "man_tipping_hand": "💁\u200d♂️", + "man_tipping_hand_dark_skin_tone": "💁🏿\u200d♂️", + "man_tipping_hand_light_skin_tone": "💁🏻\u200d♂️", + "man_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♂️", + "man_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♂️", + "man_tipping_hand_medium_skin_tone": "💁🏽\u200d♂️", + "man_vampire": "🧛\u200d♂️", + "man_vampire_dark_skin_tone": "🧛🏿\u200d♂️", + "man_vampire_light_skin_tone": "🧛🏻\u200d♂️", + "man_vampire_medium-dark_skin_tone": "🧛🏾\u200d♂️", + "man_vampire_medium-light_skin_tone": "🧛🏼\u200d♂️", + "man_vampire_medium_skin_tone": "🧛🏽\u200d♂️", + "man_walking": "🚶\u200d♂️", + "man_walking_dark_skin_tone": "🚶🏿\u200d♂️", + "man_walking_light_skin_tone": "🚶🏻\u200d♂️", + "man_walking_medium-dark_skin_tone": "🚶🏾\u200d♂️", + "man_walking_medium-light_skin_tone": "🚶🏼\u200d♂️", + "man_walking_medium_skin_tone": "🚶🏽\u200d♂️", + "man_wearing_turban": "👳\u200d♂️", + "man_wearing_turban_dark_skin_tone": "👳🏿\u200d♂️", + "man_wearing_turban_light_skin_tone": "👳🏻\u200d♂️", + "man_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♂️", + "man_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♂️", + "man_wearing_turban_medium_skin_tone": "👳🏽\u200d♂️", + "man_with_probing_cane": "👨\u200d🦯", + "man_with_chinese_cap": "👲", + "man_with_chinese_cap_dark_skin_tone": "👲🏿", + "man_with_chinese_cap_light_skin_tone": "👲🏻", + "man_with_chinese_cap_medium-dark_skin_tone": "👲🏾", + "man_with_chinese_cap_medium-light_skin_tone": "👲🏼", + "man_with_chinese_cap_medium_skin_tone": "👲🏽", + "man_zombie": "🧟\u200d♂️", + "mango": "🥭", + "mantelpiece_clock": "🕰", + "manual_wheelchair": "🦽", + "man’s_shoe": "👞", + "map_of_japan": "🗾", + "maple_leaf": "🍁", + "martial_arts_uniform": "🥋", + "mate": "🧉", + "meat_on_bone": "🍖", + "mechanical_arm": "🦾", + "mechanical_leg": "🦿", + "medical_symbol": "⚕", + "megaphone": "📣", + "melon": "🍈", + "memo": "📝", + "men_with_bunny_ears": "👯\u200d♂️", + "men_wrestling": "🤼\u200d♂️", + "menorah": "🕎", + "men’s_room": "🚹", + "mermaid": "🧜\u200d♀️", + "mermaid_dark_skin_tone": "🧜🏿\u200d♀️", + "mermaid_light_skin_tone": "🧜🏻\u200d♀️", + "mermaid_medium-dark_skin_tone": "🧜🏾\u200d♀️", + "mermaid_medium-light_skin_tone": "🧜🏼\u200d♀️", + 
"mermaid_medium_skin_tone": "🧜🏽\u200d♀️", + "merman": "🧜\u200d♂️", + "merman_dark_skin_tone": "🧜🏿\u200d♂️", + "merman_light_skin_tone": "🧜🏻\u200d♂️", + "merman_medium-dark_skin_tone": "🧜🏾\u200d♂️", + "merman_medium-light_skin_tone": "🧜🏼\u200d♂️", + "merman_medium_skin_tone": "🧜🏽\u200d♂️", + "merperson": "🧜", + "merperson_dark_skin_tone": "🧜🏿", + "merperson_light_skin_tone": "🧜🏻", + "merperson_medium-dark_skin_tone": "🧜🏾", + "merperson_medium-light_skin_tone": "🧜🏼", + "merperson_medium_skin_tone": "🧜🏽", + "metro": "🚇", + "microbe": "🦠", + "microphone": "🎤", + "microscope": "🔬", + "middle_finger": "🖕", + "middle_finger_dark_skin_tone": "🖕🏿", + "middle_finger_light_skin_tone": "🖕🏻", + "middle_finger_medium-dark_skin_tone": "🖕🏾", + "middle_finger_medium-light_skin_tone": "🖕🏼", + "middle_finger_medium_skin_tone": "🖕🏽", + "military_medal": "🎖", + "milky_way": "🌌", + "minibus": "🚐", + "moai": "🗿", + "mobile_phone": "📱", + "mobile_phone_off": "📴", + "mobile_phone_with_arrow": "📲", + "money-mouth_face": "🤑", + "money_bag": "💰", + "money_with_wings": "💸", + "monkey": "🐒", + "monkey_face": "🐵", + "monorail": "🚝", + "moon_cake": "🥮", + "moon_viewing_ceremony": "🎑", + "mosque": "🕌", + "mosquito": "🦟", + "motor_boat": "🛥", + "motor_scooter": "🛵", + "motorcycle": "🏍", + "motorized_wheelchair": "🦼", + "motorway": "🛣", + "mount_fuji": "🗻", + "mountain": "⛰", + "mountain_cableway": "🚠", + "mountain_railway": "🚞", + "mouse": "🐭", + "mouse_face": "🐭", + "mouth": "👄", + "movie_camera": "🎥", + "mushroom": "🍄", + "musical_keyboard": "🎹", + "musical_note": "🎵", + "musical_notes": "🎶", + "musical_score": "🎼", + "muted_speaker": "🔇", + "nail_polish": "💅", + "nail_polish_dark_skin_tone": "💅🏿", + "nail_polish_light_skin_tone": "💅🏻", + "nail_polish_medium-dark_skin_tone": "💅🏾", + "nail_polish_medium-light_skin_tone": "💅🏼", + "nail_polish_medium_skin_tone": "💅🏽", + "name_badge": "📛", + "national_park": "🏞", + "nauseated_face": "🤢", + "nazar_amulet": "🧿", + "necktie": "👔", + "nerd_face": "🤓", + "neutral_face": "😐", + "new_moon": "🌑", + "new_moon_face": "🌚", + "newspaper": "📰", + "next_track_button": "⏭", + "night_with_stars": "🌃", + "nine-thirty": "🕤", + "nine_o’clock": "🕘", + "no_bicycles": "🚳", + "no_entry": "⛔", + "no_littering": "🚯", + "no_mobile_phones": "📵", + "no_one_under_eighteen": "🔞", + "no_pedestrians": "🚷", + "no_smoking": "🚭", + "non-potable_water": "🚱", + "nose": "👃", + "nose_dark_skin_tone": "👃🏿", + "nose_light_skin_tone": "👃🏻", + "nose_medium-dark_skin_tone": "👃🏾", + "nose_medium-light_skin_tone": "👃🏼", + "nose_medium_skin_tone": "👃🏽", + "notebook": "📓", + "notebook_with_decorative_cover": "📔", + "nut_and_bolt": "🔩", + "octopus": "🐙", + "oden": "🍢", + "office_building": "🏢", + "ogre": "👹", + "oil_drum": "🛢", + "old_key": "🗝", + "old_man": "👴", + "old_man_dark_skin_tone": "👴🏿", + "old_man_light_skin_tone": "👴🏻", + "old_man_medium-dark_skin_tone": "👴🏾", + "old_man_medium-light_skin_tone": "👴🏼", + "old_man_medium_skin_tone": "👴🏽", + "old_woman": "👵", + "old_woman_dark_skin_tone": "👵🏿", + "old_woman_light_skin_tone": "👵🏻", + "old_woman_medium-dark_skin_tone": "👵🏾", + "old_woman_medium-light_skin_tone": "👵🏼", + "old_woman_medium_skin_tone": "👵🏽", + "older_adult": "🧓", + "older_adult_dark_skin_tone": "🧓🏿", + "older_adult_light_skin_tone": "🧓🏻", + "older_adult_medium-dark_skin_tone": "🧓🏾", + "older_adult_medium-light_skin_tone": "🧓🏼", + "older_adult_medium_skin_tone": "🧓🏽", + "om": "🕉", + "oncoming_automobile": "🚘", + "oncoming_bus": "🚍", + "oncoming_fist": "👊", + "oncoming_fist_dark_skin_tone": "👊🏿", + 
"oncoming_fist_light_skin_tone": "👊🏻", + "oncoming_fist_medium-dark_skin_tone": "👊🏾", + "oncoming_fist_medium-light_skin_tone": "👊🏼", + "oncoming_fist_medium_skin_tone": "👊🏽", + "oncoming_police_car": "🚔", + "oncoming_taxi": "🚖", + "one-piece_swimsuit": "🩱", + "one-thirty": "🕜", + "one_o’clock": "🕐", + "onion": "🧅", + "open_book": "📖", + "open_file_folder": "📂", + "open_hands": "👐", + "open_hands_dark_skin_tone": "👐🏿", + "open_hands_light_skin_tone": "👐🏻", + "open_hands_medium-dark_skin_tone": "👐🏾", + "open_hands_medium-light_skin_tone": "👐🏼", + "open_hands_medium_skin_tone": "👐🏽", + "open_mailbox_with_lowered_flag": "📭", + "open_mailbox_with_raised_flag": "📬", + "optical_disk": "💿", + "orange_book": "📙", + "orange_circle": "🟠", + "orange_heart": "🧡", + "orange_square": "🟧", + "orangutan": "🦧", + "orthodox_cross": "☦", + "otter": "🦦", + "outbox_tray": "📤", + "owl": "🦉", + "ox": "🐂", + "oyster": "🦪", + "package": "📦", + "page_facing_up": "📄", + "page_with_curl": "📃", + "pager": "📟", + "paintbrush": "🖌", + "palm_tree": "🌴", + "palms_up_together": "🤲", + "palms_up_together_dark_skin_tone": "🤲🏿", + "palms_up_together_light_skin_tone": "🤲🏻", + "palms_up_together_medium-dark_skin_tone": "🤲🏾", + "palms_up_together_medium-light_skin_tone": "🤲🏼", + "palms_up_together_medium_skin_tone": "🤲🏽", + "pancakes": "🥞", + "panda_face": "🐼", + "paperclip": "📎", + "parrot": "🦜", + "part_alternation_mark": "〽", + "party_popper": "🎉", + "partying_face": "🥳", + "passenger_ship": "🛳", + "passport_control": "🛂", + "pause_button": "⏸", + "paw_prints": "🐾", + "peace_symbol": "☮", + "peach": "🍑", + "peacock": "🦚", + "peanuts": "🥜", + "pear": "🍐", + "pen": "🖊", + "pencil": "📝", + "penguin": "🐧", + "pensive_face": "😔", + "people_holding_hands": "🧑\u200d🤝\u200d🧑", + "people_with_bunny_ears": "👯", + "people_wrestling": "🤼", + "performing_arts": "🎭", + "persevering_face": "😣", + "person_biking": "🚴", + "person_biking_dark_skin_tone": "🚴🏿", + "person_biking_light_skin_tone": "🚴🏻", + "person_biking_medium-dark_skin_tone": "🚴🏾", + "person_biking_medium-light_skin_tone": "🚴🏼", + "person_biking_medium_skin_tone": "🚴🏽", + "person_bouncing_ball": "⛹", + "person_bouncing_ball_dark_skin_tone": "⛹🏿", + "person_bouncing_ball_light_skin_tone": "⛹🏻", + "person_bouncing_ball_medium-dark_skin_tone": "⛹🏾", + "person_bouncing_ball_medium-light_skin_tone": "⛹🏼", + "person_bouncing_ball_medium_skin_tone": "⛹🏽", + "person_bowing": "🙇", + "person_bowing_dark_skin_tone": "🙇🏿", + "person_bowing_light_skin_tone": "🙇🏻", + "person_bowing_medium-dark_skin_tone": "🙇🏾", + "person_bowing_medium-light_skin_tone": "🙇🏼", + "person_bowing_medium_skin_tone": "🙇🏽", + "person_cartwheeling": "🤸", + "person_cartwheeling_dark_skin_tone": "🤸🏿", + "person_cartwheeling_light_skin_tone": "🤸🏻", + "person_cartwheeling_medium-dark_skin_tone": "🤸🏾", + "person_cartwheeling_medium-light_skin_tone": "🤸🏼", + "person_cartwheeling_medium_skin_tone": "🤸🏽", + "person_climbing": "🧗", + "person_climbing_dark_skin_tone": "🧗🏿", + "person_climbing_light_skin_tone": "🧗🏻", + "person_climbing_medium-dark_skin_tone": "🧗🏾", + "person_climbing_medium-light_skin_tone": "🧗🏼", + "person_climbing_medium_skin_tone": "🧗🏽", + "person_facepalming": "🤦", + "person_facepalming_dark_skin_tone": "🤦🏿", + "person_facepalming_light_skin_tone": "🤦🏻", + "person_facepalming_medium-dark_skin_tone": "🤦🏾", + "person_facepalming_medium-light_skin_tone": "🤦🏼", + "person_facepalming_medium_skin_tone": "🤦🏽", + "person_fencing": "🤺", + "person_frowning": "🙍", + "person_frowning_dark_skin_tone": "🙍🏿", + 
"person_frowning_light_skin_tone": "🙍🏻", + "person_frowning_medium-dark_skin_tone": "🙍🏾", + "person_frowning_medium-light_skin_tone": "🙍🏼", + "person_frowning_medium_skin_tone": "🙍🏽", + "person_gesturing_no": "🙅", + "person_gesturing_no_dark_skin_tone": "🙅🏿", + "person_gesturing_no_light_skin_tone": "🙅🏻", + "person_gesturing_no_medium-dark_skin_tone": "🙅🏾", + "person_gesturing_no_medium-light_skin_tone": "🙅🏼", + "person_gesturing_no_medium_skin_tone": "🙅🏽", + "person_gesturing_ok": "🙆", + "person_gesturing_ok_dark_skin_tone": "🙆🏿", + "person_gesturing_ok_light_skin_tone": "🙆🏻", + "person_gesturing_ok_medium-dark_skin_tone": "🙆🏾", + "person_gesturing_ok_medium-light_skin_tone": "🙆🏼", + "person_gesturing_ok_medium_skin_tone": "🙆🏽", + "person_getting_haircut": "💇", + "person_getting_haircut_dark_skin_tone": "💇🏿", + "person_getting_haircut_light_skin_tone": "💇🏻", + "person_getting_haircut_medium-dark_skin_tone": "💇🏾", + "person_getting_haircut_medium-light_skin_tone": "💇🏼", + "person_getting_haircut_medium_skin_tone": "💇🏽", + "person_getting_massage": "💆", + "person_getting_massage_dark_skin_tone": "💆🏿", + "person_getting_massage_light_skin_tone": "💆🏻", + "person_getting_massage_medium-dark_skin_tone": "💆🏾", + "person_getting_massage_medium-light_skin_tone": "💆🏼", + "person_getting_massage_medium_skin_tone": "💆🏽", + "person_golfing": "🏌", + "person_golfing_dark_skin_tone": "🏌🏿", + "person_golfing_light_skin_tone": "🏌🏻", + "person_golfing_medium-dark_skin_tone": "🏌🏾", + "person_golfing_medium-light_skin_tone": "🏌🏼", + "person_golfing_medium_skin_tone": "🏌🏽", + "person_in_bed": "🛌", + "person_in_bed_dark_skin_tone": "🛌🏿", + "person_in_bed_light_skin_tone": "🛌🏻", + "person_in_bed_medium-dark_skin_tone": "🛌🏾", + "person_in_bed_medium-light_skin_tone": "🛌🏼", + "person_in_bed_medium_skin_tone": "🛌🏽", + "person_in_lotus_position": "🧘", + "person_in_lotus_position_dark_skin_tone": "🧘🏿", + "person_in_lotus_position_light_skin_tone": "🧘🏻", + "person_in_lotus_position_medium-dark_skin_tone": "🧘🏾", + "person_in_lotus_position_medium-light_skin_tone": "🧘🏼", + "person_in_lotus_position_medium_skin_tone": "🧘🏽", + "person_in_steamy_room": "🧖", + "person_in_steamy_room_dark_skin_tone": "🧖🏿", + "person_in_steamy_room_light_skin_tone": "🧖🏻", + "person_in_steamy_room_medium-dark_skin_tone": "🧖🏾", + "person_in_steamy_room_medium-light_skin_tone": "🧖🏼", + "person_in_steamy_room_medium_skin_tone": "🧖🏽", + "person_juggling": "🤹", + "person_juggling_dark_skin_tone": "🤹🏿", + "person_juggling_light_skin_tone": "🤹🏻", + "person_juggling_medium-dark_skin_tone": "🤹🏾", + "person_juggling_medium-light_skin_tone": "🤹🏼", + "person_juggling_medium_skin_tone": "🤹🏽", + "person_kneeling": "🧎", + "person_lifting_weights": "🏋", + "person_lifting_weights_dark_skin_tone": "🏋🏿", + "person_lifting_weights_light_skin_tone": "🏋🏻", + "person_lifting_weights_medium-dark_skin_tone": "🏋🏾", + "person_lifting_weights_medium-light_skin_tone": "🏋🏼", + "person_lifting_weights_medium_skin_tone": "🏋🏽", + "person_mountain_biking": "🚵", + "person_mountain_biking_dark_skin_tone": "🚵🏿", + "person_mountain_biking_light_skin_tone": "🚵🏻", + "person_mountain_biking_medium-dark_skin_tone": "🚵🏾", + "person_mountain_biking_medium-light_skin_tone": "🚵🏼", + "person_mountain_biking_medium_skin_tone": "🚵🏽", + "person_playing_handball": "🤾", + "person_playing_handball_dark_skin_tone": "🤾🏿", + "person_playing_handball_light_skin_tone": "🤾🏻", + "person_playing_handball_medium-dark_skin_tone": "🤾🏾", + "person_playing_handball_medium-light_skin_tone": "🤾🏼", + 
"person_playing_handball_medium_skin_tone": "🤾🏽", + "person_playing_water_polo": "🤽", + "person_playing_water_polo_dark_skin_tone": "🤽🏿", + "person_playing_water_polo_light_skin_tone": "🤽🏻", + "person_playing_water_polo_medium-dark_skin_tone": "🤽🏾", + "person_playing_water_polo_medium-light_skin_tone": "🤽🏼", + "person_playing_water_polo_medium_skin_tone": "🤽🏽", + "person_pouting": "🙎", + "person_pouting_dark_skin_tone": "🙎🏿", + "person_pouting_light_skin_tone": "🙎🏻", + "person_pouting_medium-dark_skin_tone": "🙎🏾", + "person_pouting_medium-light_skin_tone": "🙎🏼", + "person_pouting_medium_skin_tone": "🙎🏽", + "person_raising_hand": "🙋", + "person_raising_hand_dark_skin_tone": "🙋🏿", + "person_raising_hand_light_skin_tone": "🙋🏻", + "person_raising_hand_medium-dark_skin_tone": "🙋🏾", + "person_raising_hand_medium-light_skin_tone": "🙋🏼", + "person_raising_hand_medium_skin_tone": "🙋🏽", + "person_rowing_boat": "🚣", + "person_rowing_boat_dark_skin_tone": "🚣🏿", + "person_rowing_boat_light_skin_tone": "🚣🏻", + "person_rowing_boat_medium-dark_skin_tone": "🚣🏾", + "person_rowing_boat_medium-light_skin_tone": "🚣🏼", + "person_rowing_boat_medium_skin_tone": "🚣🏽", + "person_running": "🏃", + "person_running_dark_skin_tone": "🏃🏿", + "person_running_light_skin_tone": "🏃🏻", + "person_running_medium-dark_skin_tone": "🏃🏾", + "person_running_medium-light_skin_tone": "🏃🏼", + "person_running_medium_skin_tone": "🏃🏽", + "person_shrugging": "🤷", + "person_shrugging_dark_skin_tone": "🤷🏿", + "person_shrugging_light_skin_tone": "🤷🏻", + "person_shrugging_medium-dark_skin_tone": "🤷🏾", + "person_shrugging_medium-light_skin_tone": "🤷🏼", + "person_shrugging_medium_skin_tone": "🤷🏽", + "person_standing": "🧍", + "person_surfing": "🏄", + "person_surfing_dark_skin_tone": "🏄🏿", + "person_surfing_light_skin_tone": "🏄🏻", + "person_surfing_medium-dark_skin_tone": "🏄🏾", + "person_surfing_medium-light_skin_tone": "🏄🏼", + "person_surfing_medium_skin_tone": "🏄🏽", + "person_swimming": "🏊", + "person_swimming_dark_skin_tone": "🏊🏿", + "person_swimming_light_skin_tone": "🏊🏻", + "person_swimming_medium-dark_skin_tone": "🏊🏾", + "person_swimming_medium-light_skin_tone": "🏊🏼", + "person_swimming_medium_skin_tone": "🏊🏽", + "person_taking_bath": "🛀", + "person_taking_bath_dark_skin_tone": "🛀🏿", + "person_taking_bath_light_skin_tone": "🛀🏻", + "person_taking_bath_medium-dark_skin_tone": "🛀🏾", + "person_taking_bath_medium-light_skin_tone": "🛀🏼", + "person_taking_bath_medium_skin_tone": "🛀🏽", + "person_tipping_hand": "💁", + "person_tipping_hand_dark_skin_tone": "💁🏿", + "person_tipping_hand_light_skin_tone": "💁🏻", + "person_tipping_hand_medium-dark_skin_tone": "💁🏾", + "person_tipping_hand_medium-light_skin_tone": "💁🏼", + "person_tipping_hand_medium_skin_tone": "💁🏽", + "person_walking": "🚶", + "person_walking_dark_skin_tone": "🚶🏿", + "person_walking_light_skin_tone": "🚶🏻", + "person_walking_medium-dark_skin_tone": "🚶🏾", + "person_walking_medium-light_skin_tone": "🚶🏼", + "person_walking_medium_skin_tone": "🚶🏽", + "person_wearing_turban": "👳", + "person_wearing_turban_dark_skin_tone": "👳🏿", + "person_wearing_turban_light_skin_tone": "👳🏻", + "person_wearing_turban_medium-dark_skin_tone": "👳🏾", + "person_wearing_turban_medium-light_skin_tone": "👳🏼", + "person_wearing_turban_medium_skin_tone": "👳🏽", + "petri_dish": "🧫", + "pick": "⛏", + "pie": "🥧", + "pig": "🐷", + "pig_face": "🐷", + "pig_nose": "🐽", + "pile_of_poo": "💩", + "pill": "💊", + "pinching_hand": "🤏", + "pine_decoration": "🎍", + "pineapple": "🍍", + "ping_pong": "🏓", + "pirate_flag": "🏴\u200d☠️", + 
"pistol": "🔫", + "pizza": "🍕", + "place_of_worship": "🛐", + "play_button": "▶", + "play_or_pause_button": "⏯", + "pleading_face": "🥺", + "police_car": "🚓", + "police_car_light": "🚨", + "police_officer": "👮", + "police_officer_dark_skin_tone": "👮🏿", + "police_officer_light_skin_tone": "👮🏻", + "police_officer_medium-dark_skin_tone": "👮🏾", + "police_officer_medium-light_skin_tone": "👮🏼", + "police_officer_medium_skin_tone": "👮🏽", + "poodle": "🐩", + "pool_8_ball": "🎱", + "popcorn": "🍿", + "post_office": "🏣", + "postal_horn": "📯", + "postbox": "📮", + "pot_of_food": "🍲", + "potable_water": "🚰", + "potato": "🥔", + "poultry_leg": "🍗", + "pound_banknote": "💷", + "pouting_cat_face": "😾", + "pouting_face": "😡", + "prayer_beads": "📿", + "pregnant_woman": "🤰", + "pregnant_woman_dark_skin_tone": "🤰🏿", + "pregnant_woman_light_skin_tone": "🤰🏻", + "pregnant_woman_medium-dark_skin_tone": "🤰🏾", + "pregnant_woman_medium-light_skin_tone": "🤰🏼", + "pregnant_woman_medium_skin_tone": "🤰🏽", + "pretzel": "🥨", + "probing_cane": "🦯", + "prince": "🤴", + "prince_dark_skin_tone": "🤴🏿", + "prince_light_skin_tone": "🤴🏻", + "prince_medium-dark_skin_tone": "🤴🏾", + "prince_medium-light_skin_tone": "🤴🏼", + "prince_medium_skin_tone": "🤴🏽", + "princess": "👸", + "princess_dark_skin_tone": "👸🏿", + "princess_light_skin_tone": "👸🏻", + "princess_medium-dark_skin_tone": "👸🏾", + "princess_medium-light_skin_tone": "👸🏼", + "princess_medium_skin_tone": "👸🏽", + "printer": "🖨", + "prohibited": "🚫", + "purple_circle": "🟣", + "purple_heart": "💜", + "purple_square": "🟪", + "purse": "👛", + "pushpin": "📌", + "question_mark": "❓", + "rabbit": "🐰", + "rabbit_face": "🐰", + "raccoon": "🦝", + "racing_car": "🏎", + "radio": "📻", + "radio_button": "🔘", + "radioactive": "☢", + "railway_car": "🚃", + "railway_track": "🛤", + "rainbow": "🌈", + "rainbow_flag": "🏳️\u200d🌈", + "raised_back_of_hand": "🤚", + "raised_back_of_hand_dark_skin_tone": "🤚🏿", + "raised_back_of_hand_light_skin_tone": "🤚🏻", + "raised_back_of_hand_medium-dark_skin_tone": "🤚🏾", + "raised_back_of_hand_medium-light_skin_tone": "🤚🏼", + "raised_back_of_hand_medium_skin_tone": "🤚🏽", + "raised_fist": "✊", + "raised_fist_dark_skin_tone": "✊🏿", + "raised_fist_light_skin_tone": "✊🏻", + "raised_fist_medium-dark_skin_tone": "✊🏾", + "raised_fist_medium-light_skin_tone": "✊🏼", + "raised_fist_medium_skin_tone": "✊🏽", + "raised_hand": "✋", + "raised_hand_dark_skin_tone": "✋🏿", + "raised_hand_light_skin_tone": "✋🏻", + "raised_hand_medium-dark_skin_tone": "✋🏾", + "raised_hand_medium-light_skin_tone": "✋🏼", + "raised_hand_medium_skin_tone": "✋🏽", + "raising_hands": "🙌", + "raising_hands_dark_skin_tone": "🙌🏿", + "raising_hands_light_skin_tone": "🙌🏻", + "raising_hands_medium-dark_skin_tone": "🙌🏾", + "raising_hands_medium-light_skin_tone": "🙌🏼", + "raising_hands_medium_skin_tone": "🙌🏽", + "ram": "🐏", + "rat": "🐀", + "razor": "🪒", + "ringed_planet": "🪐", + "receipt": "🧾", + "record_button": "⏺", + "recycling_symbol": "♻", + "red_apple": "🍎", + "red_circle": "🔴", + "red_envelope": "🧧", + "red_hair": "🦰", + "red-haired_man": "👨\u200d🦰", + "red-haired_woman": "👩\u200d🦰", + "red_heart": "❤", + "red_paper_lantern": "🏮", + "red_square": "🟥", + "red_triangle_pointed_down": "🔻", + "red_triangle_pointed_up": "🔺", + "registered": "®", + "relieved_face": "😌", + "reminder_ribbon": "🎗", + "repeat_button": "🔁", + "repeat_single_button": "🔂", + "rescue_worker’s_helmet": "⛑", + "restroom": "🚻", + "reverse_button": "◀", + "revolving_hearts": "💞", + "rhinoceros": "🦏", + "ribbon": "🎀", + "rice_ball": "🍙", + "rice_cracker": "🍘", + 
"right-facing_fist": "🤜", + "right-facing_fist_dark_skin_tone": "🤜🏿", + "right-facing_fist_light_skin_tone": "🤜🏻", + "right-facing_fist_medium-dark_skin_tone": "🤜🏾", + "right-facing_fist_medium-light_skin_tone": "🤜🏼", + "right-facing_fist_medium_skin_tone": "🤜🏽", + "right_anger_bubble": "🗯", + "right_arrow": "➡", + "right_arrow_curving_down": "⤵", + "right_arrow_curving_left": "↩", + "right_arrow_curving_up": "⤴", + "ring": "💍", + "roasted_sweet_potato": "🍠", + "robot_face": "🤖", + "rocket": "🚀", + "roll_of_paper": "🧻", + "rolled-up_newspaper": "🗞", + "roller_coaster": "🎢", + "rolling_on_the_floor_laughing": "🤣", + "rooster": "🐓", + "rose": "🌹", + "rosette": "🏵", + "round_pushpin": "📍", + "rugby_football": "🏉", + "running_shirt": "🎽", + "running_shoe": "👟", + "sad_but_relieved_face": "😥", + "safety_pin": "🧷", + "safety_vest": "🦺", + "salt": "🧂", + "sailboat": "⛵", + "sake": "🍶", + "sandwich": "🥪", + "sari": "🥻", + "satellite": "📡", + "satellite_antenna": "📡", + "sauropod": "🦕", + "saxophone": "🎷", + "scarf": "🧣", + "school": "🏫", + "school_backpack": "🎒", + "scissors": "✂", + "scorpion": "🦂", + "scroll": "📜", + "seat": "💺", + "see-no-evil_monkey": "🙈", + "seedling": "🌱", + "selfie": "🤳", + "selfie_dark_skin_tone": "🤳🏿", + "selfie_light_skin_tone": "🤳🏻", + "selfie_medium-dark_skin_tone": "🤳🏾", + "selfie_medium-light_skin_tone": "🤳🏼", + "selfie_medium_skin_tone": "🤳🏽", + "service_dog": "🐕\u200d🦺", + "seven-thirty": "🕢", + "seven_o’clock": "🕖", + "shallow_pan_of_food": "🥘", + "shamrock": "☘", + "shark": "🦈", + "shaved_ice": "🍧", + "sheaf_of_rice": "🌾", + "shield": "🛡", + "shinto_shrine": "⛩", + "ship": "🚢", + "shooting_star": "🌠", + "shopping_bags": "🛍", + "shopping_cart": "🛒", + "shortcake": "🍰", + "shorts": "🩳", + "shower": "🚿", + "shrimp": "🦐", + "shuffle_tracks_button": "🔀", + "shushing_face": "🤫", + "sign_of_the_horns": "🤘", + "sign_of_the_horns_dark_skin_tone": "🤘🏿", + "sign_of_the_horns_light_skin_tone": "🤘🏻", + "sign_of_the_horns_medium-dark_skin_tone": "🤘🏾", + "sign_of_the_horns_medium-light_skin_tone": "🤘🏼", + "sign_of_the_horns_medium_skin_tone": "🤘🏽", + "six-thirty": "🕡", + "six_o’clock": "🕕", + "skateboard": "🛹", + "skier": "⛷", + "skis": "🎿", + "skull": "💀", + "skull_and_crossbones": "☠", + "skunk": "🦨", + "sled": "🛷", + "sleeping_face": "😴", + "sleepy_face": "😪", + "slightly_frowning_face": "🙁", + "slightly_smiling_face": "🙂", + "slot_machine": "🎰", + "sloth": "🦥", + "small_airplane": "🛩", + "small_blue_diamond": "🔹", + "small_orange_diamond": "🔸", + "smiling_cat_face_with_heart-eyes": "😻", + "smiling_face": "☺", + "smiling_face_with_halo": "😇", + "smiling_face_with_3_hearts": "🥰", + "smiling_face_with_heart-eyes": "😍", + "smiling_face_with_horns": "😈", + "smiling_face_with_smiling_eyes": "😊", + "smiling_face_with_sunglasses": "😎", + "smirking_face": "😏", + "snail": "🐌", + "snake": "🐍", + "sneezing_face": "🤧", + "snow-capped_mountain": "🏔", + "snowboarder": "🏂", + "snowboarder_dark_skin_tone": "🏂🏿", + "snowboarder_light_skin_tone": "🏂🏻", + "snowboarder_medium-dark_skin_tone": "🏂🏾", + "snowboarder_medium-light_skin_tone": "🏂🏼", + "snowboarder_medium_skin_tone": "🏂🏽", + "snowflake": "❄", + "snowman": "☃", + "snowman_without_snow": "⛄", + "soap": "🧼", + "soccer_ball": "⚽", + "socks": "🧦", + "softball": "🥎", + "soft_ice_cream": "🍦", + "spade_suit": "♠", + "spaghetti": "🍝", + "sparkle": "❇", + "sparkler": "🎇", + "sparkles": "✨", + "sparkling_heart": "💖", + "speak-no-evil_monkey": "🙊", + "speaker_high_volume": "🔊", + "speaker_low_volume": "🔈", + "speaker_medium_volume": "🔉", + 
"speaking_head": "🗣", + "speech_balloon": "💬", + "speedboat": "🚤", + "spider": "🕷", + "spider_web": "🕸", + "spiral_calendar": "🗓", + "spiral_notepad": "🗒", + "spiral_shell": "🐚", + "spoon": "🥄", + "sponge": "🧽", + "sport_utility_vehicle": "🚙", + "sports_medal": "🏅", + "spouting_whale": "🐳", + "squid": "🦑", + "squinting_face_with_tongue": "😝", + "stadium": "🏟", + "star-struck": "🤩", + "star_and_crescent": "☪", + "star_of_david": "✡", + "station": "🚉", + "steaming_bowl": "🍜", + "stethoscope": "🩺", + "stop_button": "⏹", + "stop_sign": "🛑", + "stopwatch": "⏱", + "straight_ruler": "📏", + "strawberry": "🍓", + "studio_microphone": "🎙", + "stuffed_flatbread": "🥙", + "sun": "☀", + "sun_behind_cloud": "⛅", + "sun_behind_large_cloud": "🌥", + "sun_behind_rain_cloud": "🌦", + "sun_behind_small_cloud": "🌤", + "sun_with_face": "🌞", + "sunflower": "🌻", + "sunglasses": "😎", + "sunrise": "🌅", + "sunrise_over_mountains": "🌄", + "sunset": "🌇", + "superhero": "🦸", + "supervillain": "🦹", + "sushi": "🍣", + "suspension_railway": "🚟", + "swan": "🦢", + "sweat_droplets": "💦", + "synagogue": "🕍", + "syringe": "💉", + "t-shirt": "👕", + "taco": "🌮", + "takeout_box": "🥡", + "tanabata_tree": "🎋", + "tangerine": "🍊", + "taxi": "🚕", + "teacup_without_handle": "🍵", + "tear-off_calendar": "📆", + "teddy_bear": "🧸", + "telephone": "☎", + "telephone_receiver": "📞", + "telescope": "🔭", + "television": "📺", + "ten-thirty": "🕥", + "ten_o’clock": "🕙", + "tennis": "🎾", + "tent": "⛺", + "test_tube": "🧪", + "thermometer": "🌡", + "thinking_face": "🤔", + "thought_balloon": "💭", + "thread": "🧵", + "three-thirty": "🕞", + "three_o’clock": "🕒", + "thumbs_down": "👎", + "thumbs_down_dark_skin_tone": "👎🏿", + "thumbs_down_light_skin_tone": "👎🏻", + "thumbs_down_medium-dark_skin_tone": "👎🏾", + "thumbs_down_medium-light_skin_tone": "👎🏼", + "thumbs_down_medium_skin_tone": "👎🏽", + "thumbs_up": "👍", + "thumbs_up_dark_skin_tone": "👍🏿", + "thumbs_up_light_skin_tone": "👍🏻", + "thumbs_up_medium-dark_skin_tone": "👍🏾", + "thumbs_up_medium-light_skin_tone": "👍🏼", + "thumbs_up_medium_skin_tone": "👍🏽", + "ticket": "🎫", + "tiger": "🐯", + "tiger_face": "🐯", + "timer_clock": "⏲", + "tired_face": "😫", + "toolbox": "🧰", + "toilet": "🚽", + "tomato": "🍅", + "tongue": "👅", + "tooth": "🦷", + "top_hat": "🎩", + "tornado": "🌪", + "trackball": "🖲", + "tractor": "🚜", + "trade_mark": "™", + "train": "🚋", + "tram": "🚊", + "tram_car": "🚋", + "triangular_flag": "🚩", + "triangular_ruler": "📐", + "trident_emblem": "🔱", + "trolleybus": "🚎", + "trophy": "🏆", + "tropical_drink": "🍹", + "tropical_fish": "🐠", + "trumpet": "🎺", + "tulip": "🌷", + "tumbler_glass": "🥃", + "turtle": "🐢", + "twelve-thirty": "🕧", + "twelve_o’clock": "🕛", + "two-hump_camel": "🐫", + "two-thirty": "🕝", + "two_hearts": "💕", + "two_men_holding_hands": "👬", + "two_o’clock": "🕑", + "two_women_holding_hands": "👭", + "umbrella": "☂", + "umbrella_on_ground": "⛱", + "umbrella_with_rain_drops": "☔", + "unamused_face": "😒", + "unicorn_face": "🦄", + "unlocked": "🔓", + "up-down_arrow": "↕", + "up-left_arrow": "↖", + "up-right_arrow": "↗", + "up_arrow": "⬆", + "upside-down_face": "🙃", + "upwards_button": "🔼", + "vampire": "🧛", + "vampire_dark_skin_tone": "🧛🏿", + "vampire_light_skin_tone": "🧛🏻", + "vampire_medium-dark_skin_tone": "🧛🏾", + "vampire_medium-light_skin_tone": "🧛🏼", + "vampire_medium_skin_tone": "🧛🏽", + "vertical_traffic_light": "🚦", + "vibration_mode": "📳", + "victory_hand": "✌", + "victory_hand_dark_skin_tone": "✌🏿", + "victory_hand_light_skin_tone": "✌🏻", + "victory_hand_medium-dark_skin_tone": "✌🏾", + 
"victory_hand_medium-light_skin_tone": "✌🏼", + "victory_hand_medium_skin_tone": "✌🏽", + "video_camera": "📹", + "video_game": "🎮", + "videocassette": "📼", + "violin": "🎻", + "volcano": "🌋", + "volleyball": "🏐", + "vulcan_salute": "🖖", + "vulcan_salute_dark_skin_tone": "🖖🏿", + "vulcan_salute_light_skin_tone": "🖖🏻", + "vulcan_salute_medium-dark_skin_tone": "🖖🏾", + "vulcan_salute_medium-light_skin_tone": "🖖🏼", + "vulcan_salute_medium_skin_tone": "🖖🏽", + "waffle": "🧇", + "waning_crescent_moon": "🌘", + "waning_gibbous_moon": "🌖", + "warning": "⚠", + "wastebasket": "🗑", + "watch": "⌚", + "water_buffalo": "🐃", + "water_closet": "🚾", + "water_wave": "🌊", + "watermelon": "🍉", + "waving_hand": "👋", + "waving_hand_dark_skin_tone": "👋🏿", + "waving_hand_light_skin_tone": "👋🏻", + "waving_hand_medium-dark_skin_tone": "👋🏾", + "waving_hand_medium-light_skin_tone": "👋🏼", + "waving_hand_medium_skin_tone": "👋🏽", + "wavy_dash": "〰", + "waxing_crescent_moon": "🌒", + "waxing_gibbous_moon": "🌔", + "weary_cat_face": "🙀", + "weary_face": "😩", + "wedding": "💒", + "whale": "🐳", + "wheel_of_dharma": "☸", + "wheelchair_symbol": "♿", + "white_circle": "⚪", + "white_exclamation_mark": "❕", + "white_flag": "🏳", + "white_flower": "💮", + "white_hair": "🦳", + "white-haired_man": "👨\u200d🦳", + "white-haired_woman": "👩\u200d🦳", + "white_heart": "🤍", + "white_heavy_check_mark": "✅", + "white_large_square": "⬜", + "white_medium-small_square": "◽", + "white_medium_square": "◻", + "white_medium_star": "⭐", + "white_question_mark": "❔", + "white_small_square": "▫", + "white_square_button": "🔳", + "wilted_flower": "🥀", + "wind_chime": "🎐", + "wind_face": "🌬", + "wine_glass": "🍷", + "winking_face": "😉", + "winking_face_with_tongue": "😜", + "wolf_face": "🐺", + "woman": "👩", + "woman_artist": "👩\u200d🎨", + "woman_artist_dark_skin_tone": "👩🏿\u200d🎨", + "woman_artist_light_skin_tone": "👩🏻\u200d🎨", + "woman_artist_medium-dark_skin_tone": "👩🏾\u200d🎨", + "woman_artist_medium-light_skin_tone": "👩🏼\u200d🎨", + "woman_artist_medium_skin_tone": "👩🏽\u200d🎨", + "woman_astronaut": "👩\u200d🚀", + "woman_astronaut_dark_skin_tone": "👩🏿\u200d🚀", + "woman_astronaut_light_skin_tone": "👩🏻\u200d🚀", + "woman_astronaut_medium-dark_skin_tone": "👩🏾\u200d🚀", + "woman_astronaut_medium-light_skin_tone": "👩🏼\u200d🚀", + "woman_astronaut_medium_skin_tone": "👩🏽\u200d🚀", + "woman_biking": "🚴\u200d♀️", + "woman_biking_dark_skin_tone": "🚴🏿\u200d♀️", + "woman_biking_light_skin_tone": "🚴🏻\u200d♀️", + "woman_biking_medium-dark_skin_tone": "🚴🏾\u200d♀️", + "woman_biking_medium-light_skin_tone": "🚴🏼\u200d♀️", + "woman_biking_medium_skin_tone": "🚴🏽\u200d♀️", + "woman_bouncing_ball": "⛹️\u200d♀️", + "woman_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♀️", + "woman_bouncing_ball_light_skin_tone": "⛹🏻\u200d♀️", + "woman_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♀️", + "woman_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♀️", + "woman_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♀️", + "woman_bowing": "🙇\u200d♀️", + "woman_bowing_dark_skin_tone": "🙇🏿\u200d♀️", + "woman_bowing_light_skin_tone": "🙇🏻\u200d♀️", + "woman_bowing_medium-dark_skin_tone": "🙇🏾\u200d♀️", + "woman_bowing_medium-light_skin_tone": "🙇🏼\u200d♀️", + "woman_bowing_medium_skin_tone": "🙇🏽\u200d♀️", + "woman_cartwheeling": "🤸\u200d♀️", + "woman_cartwheeling_dark_skin_tone": "🤸🏿\u200d♀️", + "woman_cartwheeling_light_skin_tone": "🤸🏻\u200d♀️", + "woman_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♀️", + "woman_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♀️", + "woman_cartwheeling_medium_skin_tone": "🤸🏽\u200d♀️", + 
"woman_climbing": "🧗\u200d♀️", + "woman_climbing_dark_skin_tone": "🧗🏿\u200d♀️", + "woman_climbing_light_skin_tone": "🧗🏻\u200d♀️", + "woman_climbing_medium-dark_skin_tone": "🧗🏾\u200d♀️", + "woman_climbing_medium-light_skin_tone": "🧗🏼\u200d♀️", + "woman_climbing_medium_skin_tone": "🧗🏽\u200d♀️", + "woman_construction_worker": "👷\u200d♀️", + "woman_construction_worker_dark_skin_tone": "👷🏿\u200d♀️", + "woman_construction_worker_light_skin_tone": "👷🏻\u200d♀️", + "woman_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♀️", + "woman_construction_worker_medium-light_skin_tone": "👷🏼\u200d♀️", + "woman_construction_worker_medium_skin_tone": "👷🏽\u200d♀️", + "woman_cook": "👩\u200d🍳", + "woman_cook_dark_skin_tone": "👩🏿\u200d🍳", + "woman_cook_light_skin_tone": "👩🏻\u200d🍳", + "woman_cook_medium-dark_skin_tone": "👩🏾\u200d🍳", + "woman_cook_medium-light_skin_tone": "👩🏼\u200d🍳", + "woman_cook_medium_skin_tone": "👩🏽\u200d🍳", + "woman_dancing": "💃", + "woman_dancing_dark_skin_tone": "💃🏿", + "woman_dancing_light_skin_tone": "💃🏻", + "woman_dancing_medium-dark_skin_tone": "💃🏾", + "woman_dancing_medium-light_skin_tone": "💃🏼", + "woman_dancing_medium_skin_tone": "💃🏽", + "woman_dark_skin_tone": "👩🏿", + "woman_detective": "🕵️\u200d♀️", + "woman_detective_dark_skin_tone": "🕵🏿\u200d♀️", + "woman_detective_light_skin_tone": "🕵🏻\u200d♀️", + "woman_detective_medium-dark_skin_tone": "🕵🏾\u200d♀️", + "woman_detective_medium-light_skin_tone": "🕵🏼\u200d♀️", + "woman_detective_medium_skin_tone": "🕵🏽\u200d♀️", + "woman_elf": "🧝\u200d♀️", + "woman_elf_dark_skin_tone": "🧝🏿\u200d♀️", + "woman_elf_light_skin_tone": "🧝🏻\u200d♀️", + "woman_elf_medium-dark_skin_tone": "🧝🏾\u200d♀️", + "woman_elf_medium-light_skin_tone": "🧝🏼\u200d♀️", + "woman_elf_medium_skin_tone": "🧝🏽\u200d♀️", + "woman_facepalming": "🤦\u200d♀️", + "woman_facepalming_dark_skin_tone": "🤦🏿\u200d♀️", + "woman_facepalming_light_skin_tone": "🤦🏻\u200d♀️", + "woman_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♀️", + "woman_facepalming_medium-light_skin_tone": "🤦🏼\u200d♀️", + "woman_facepalming_medium_skin_tone": "🤦🏽\u200d♀️", + "woman_factory_worker": "👩\u200d🏭", + "woman_factory_worker_dark_skin_tone": "👩🏿\u200d🏭", + "woman_factory_worker_light_skin_tone": "👩🏻\u200d🏭", + "woman_factory_worker_medium-dark_skin_tone": "👩🏾\u200d🏭", + "woman_factory_worker_medium-light_skin_tone": "👩🏼\u200d🏭", + "woman_factory_worker_medium_skin_tone": "👩🏽\u200d🏭", + "woman_fairy": "🧚\u200d♀️", + "woman_fairy_dark_skin_tone": "🧚🏿\u200d♀️", + "woman_fairy_light_skin_tone": "🧚🏻\u200d♀️", + "woman_fairy_medium-dark_skin_tone": "🧚🏾\u200d♀️", + "woman_fairy_medium-light_skin_tone": "🧚🏼\u200d♀️", + "woman_fairy_medium_skin_tone": "🧚🏽\u200d♀️", + "woman_farmer": "👩\u200d🌾", + "woman_farmer_dark_skin_tone": "👩🏿\u200d🌾", + "woman_farmer_light_skin_tone": "👩🏻\u200d🌾", + "woman_farmer_medium-dark_skin_tone": "👩🏾\u200d🌾", + "woman_farmer_medium-light_skin_tone": "👩🏼\u200d🌾", + "woman_farmer_medium_skin_tone": "👩🏽\u200d🌾", + "woman_firefighter": "👩\u200d🚒", + "woman_firefighter_dark_skin_tone": "👩🏿\u200d🚒", + "woman_firefighter_light_skin_tone": "👩🏻\u200d🚒", + "woman_firefighter_medium-dark_skin_tone": "👩🏾\u200d🚒", + "woman_firefighter_medium-light_skin_tone": "👩🏼\u200d🚒", + "woman_firefighter_medium_skin_tone": "👩🏽\u200d🚒", + "woman_frowning": "🙍\u200d♀️", + "woman_frowning_dark_skin_tone": "🙍🏿\u200d♀️", + "woman_frowning_light_skin_tone": "🙍🏻\u200d♀️", + "woman_frowning_medium-dark_skin_tone": "🙍🏾\u200d♀️", + "woman_frowning_medium-light_skin_tone": "🙍🏼\u200d♀️", + "woman_frowning_medium_skin_tone": 
"🙍🏽\u200d♀️", + "woman_genie": "🧞\u200d♀️", + "woman_gesturing_no": "🙅\u200d♀️", + "woman_gesturing_no_dark_skin_tone": "🙅🏿\u200d♀️", + "woman_gesturing_no_light_skin_tone": "🙅🏻\u200d♀️", + "woman_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♀️", + "woman_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♀️", + "woman_gesturing_no_medium_skin_tone": "🙅🏽\u200d♀️", + "woman_gesturing_ok": "🙆\u200d♀️", + "woman_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♀️", + "woman_gesturing_ok_light_skin_tone": "🙆🏻\u200d♀️", + "woman_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♀️", + "woman_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♀️", + "woman_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♀️", + "woman_getting_haircut": "💇\u200d♀️", + "woman_getting_haircut_dark_skin_tone": "💇🏿\u200d♀️", + "woman_getting_haircut_light_skin_tone": "💇🏻\u200d♀️", + "woman_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♀️", + "woman_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♀️", + "woman_getting_haircut_medium_skin_tone": "💇🏽\u200d♀️", + "woman_getting_massage": "💆\u200d♀️", + "woman_getting_massage_dark_skin_tone": "💆🏿\u200d♀️", + "woman_getting_massage_light_skin_tone": "💆🏻\u200d♀️", + "woman_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♀️", + "woman_getting_massage_medium-light_skin_tone": "💆🏼\u200d♀️", + "woman_getting_massage_medium_skin_tone": "💆🏽\u200d♀️", + "woman_golfing": "🏌️\u200d♀️", + "woman_golfing_dark_skin_tone": "🏌🏿\u200d♀️", + "woman_golfing_light_skin_tone": "🏌🏻\u200d♀️", + "woman_golfing_medium-dark_skin_tone": "🏌🏾\u200d♀️", + "woman_golfing_medium-light_skin_tone": "🏌🏼\u200d♀️", + "woman_golfing_medium_skin_tone": "🏌🏽\u200d♀️", + "woman_guard": "💂\u200d♀️", + "woman_guard_dark_skin_tone": "💂🏿\u200d♀️", + "woman_guard_light_skin_tone": "💂🏻\u200d♀️", + "woman_guard_medium-dark_skin_tone": "💂🏾\u200d♀️", + "woman_guard_medium-light_skin_tone": "💂🏼\u200d♀️", + "woman_guard_medium_skin_tone": "💂🏽\u200d♀️", + "woman_health_worker": "👩\u200d⚕️", + "woman_health_worker_dark_skin_tone": "👩🏿\u200d⚕️", + "woman_health_worker_light_skin_tone": "👩🏻\u200d⚕️", + "woman_health_worker_medium-dark_skin_tone": "👩🏾\u200d⚕️", + "woman_health_worker_medium-light_skin_tone": "👩🏼\u200d⚕️", + "woman_health_worker_medium_skin_tone": "👩🏽\u200d⚕️", + "woman_in_lotus_position": "🧘\u200d♀️", + "woman_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♀️", + "woman_in_lotus_position_light_skin_tone": "🧘🏻\u200d♀️", + "woman_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♀️", + "woman_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♀️", + "woman_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♀️", + "woman_in_manual_wheelchair": "👩\u200d🦽", + "woman_in_motorized_wheelchair": "👩\u200d🦼", + "woman_in_steamy_room": "🧖\u200d♀️", + "woman_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♀️", + "woman_in_steamy_room_light_skin_tone": "🧖🏻\u200d♀️", + "woman_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♀️", + "woman_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♀️", + "woman_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♀️", + "woman_judge": "👩\u200d⚖️", + "woman_judge_dark_skin_tone": "👩🏿\u200d⚖️", + "woman_judge_light_skin_tone": "👩🏻\u200d⚖️", + "woman_judge_medium-dark_skin_tone": "👩🏾\u200d⚖️", + "woman_judge_medium-light_skin_tone": "👩🏼\u200d⚖️", + "woman_judge_medium_skin_tone": "👩🏽\u200d⚖️", + "woman_juggling": "🤹\u200d♀️", + "woman_juggling_dark_skin_tone": "🤹🏿\u200d♀️", + "woman_juggling_light_skin_tone": "🤹🏻\u200d♀️", + "woman_juggling_medium-dark_skin_tone": "🤹🏾\u200d♀️", + "woman_juggling_medium-light_skin_tone": "🤹🏼\u200d♀️", + 
"woman_juggling_medium_skin_tone": "🤹🏽\u200d♀️", + "woman_lifting_weights": "🏋️\u200d♀️", + "woman_lifting_weights_dark_skin_tone": "🏋🏿\u200d♀️", + "woman_lifting_weights_light_skin_tone": "🏋🏻\u200d♀️", + "woman_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♀️", + "woman_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♀️", + "woman_lifting_weights_medium_skin_tone": "🏋🏽\u200d♀️", + "woman_light_skin_tone": "👩🏻", + "woman_mage": "🧙\u200d♀️", + "woman_mage_dark_skin_tone": "🧙🏿\u200d♀️", + "woman_mage_light_skin_tone": "🧙🏻\u200d♀️", + "woman_mage_medium-dark_skin_tone": "🧙🏾\u200d♀️", + "woman_mage_medium-light_skin_tone": "🧙🏼\u200d♀️", + "woman_mage_medium_skin_tone": "🧙🏽\u200d♀️", + "woman_mechanic": "👩\u200d🔧", + "woman_mechanic_dark_skin_tone": "👩🏿\u200d🔧", + "woman_mechanic_light_skin_tone": "👩🏻\u200d🔧", + "woman_mechanic_medium-dark_skin_tone": "👩🏾\u200d🔧", + "woman_mechanic_medium-light_skin_tone": "👩🏼\u200d🔧", + "woman_mechanic_medium_skin_tone": "👩🏽\u200d🔧", + "woman_medium-dark_skin_tone": "👩🏾", + "woman_medium-light_skin_tone": "👩🏼", + "woman_medium_skin_tone": "👩🏽", + "woman_mountain_biking": "🚵\u200d♀️", + "woman_mountain_biking_dark_skin_tone": "🚵🏿\u200d♀️", + "woman_mountain_biking_light_skin_tone": "🚵🏻\u200d♀️", + "woman_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♀️", + "woman_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♀️", + "woman_mountain_biking_medium_skin_tone": "🚵🏽\u200d♀️", + "woman_office_worker": "👩\u200d💼", + "woman_office_worker_dark_skin_tone": "👩🏿\u200d💼", + "woman_office_worker_light_skin_tone": "👩🏻\u200d💼", + "woman_office_worker_medium-dark_skin_tone": "👩🏾\u200d💼", + "woman_office_worker_medium-light_skin_tone": "👩🏼\u200d💼", + "woman_office_worker_medium_skin_tone": "👩🏽\u200d💼", + "woman_pilot": "👩\u200d✈️", + "woman_pilot_dark_skin_tone": "👩🏿\u200d✈️", + "woman_pilot_light_skin_tone": "👩🏻\u200d✈️", + "woman_pilot_medium-dark_skin_tone": "👩🏾\u200d✈️", + "woman_pilot_medium-light_skin_tone": "👩🏼\u200d✈️", + "woman_pilot_medium_skin_tone": "👩🏽\u200d✈️", + "woman_playing_handball": "🤾\u200d♀️", + "woman_playing_handball_dark_skin_tone": "🤾🏿\u200d♀️", + "woman_playing_handball_light_skin_tone": "🤾🏻\u200d♀️", + "woman_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♀️", + "woman_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♀️", + "woman_playing_handball_medium_skin_tone": "🤾🏽\u200d♀️", + "woman_playing_water_polo": "🤽\u200d♀️", + "woman_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♀️", + "woman_playing_water_polo_light_skin_tone": "🤽🏻\u200d♀️", + "woman_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♀️", + "woman_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♀️", + "woman_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♀️", + "woman_police_officer": "👮\u200d♀️", + "woman_police_officer_dark_skin_tone": "👮🏿\u200d♀️", + "woman_police_officer_light_skin_tone": "👮🏻\u200d♀️", + "woman_police_officer_medium-dark_skin_tone": "👮🏾\u200d♀️", + "woman_police_officer_medium-light_skin_tone": "👮🏼\u200d♀️", + "woman_police_officer_medium_skin_tone": "👮🏽\u200d♀️", + "woman_pouting": "🙎\u200d♀️", + "woman_pouting_dark_skin_tone": "🙎🏿\u200d♀️", + "woman_pouting_light_skin_tone": "🙎🏻\u200d♀️", + "woman_pouting_medium-dark_skin_tone": "🙎🏾\u200d♀️", + "woman_pouting_medium-light_skin_tone": "🙎🏼\u200d♀️", + "woman_pouting_medium_skin_tone": "🙎🏽\u200d♀️", + "woman_raising_hand": "🙋\u200d♀️", + "woman_raising_hand_dark_skin_tone": "🙋🏿\u200d♀️", + "woman_raising_hand_light_skin_tone": "🙋🏻\u200d♀️", + "woman_raising_hand_medium-dark_skin_tone": 
"🙋🏾\u200d♀️", + "woman_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♀️", + "woman_raising_hand_medium_skin_tone": "🙋🏽\u200d♀️", + "woman_rowing_boat": "🚣\u200d♀️", + "woman_rowing_boat_dark_skin_tone": "🚣🏿\u200d♀️", + "woman_rowing_boat_light_skin_tone": "🚣🏻\u200d♀️", + "woman_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♀️", + "woman_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♀️", + "woman_rowing_boat_medium_skin_tone": "🚣🏽\u200d♀️", + "woman_running": "🏃\u200d♀️", + "woman_running_dark_skin_tone": "🏃🏿\u200d♀️", + "woman_running_light_skin_tone": "🏃🏻\u200d♀️", + "woman_running_medium-dark_skin_tone": "🏃🏾\u200d♀️", + "woman_running_medium-light_skin_tone": "🏃🏼\u200d♀️", + "woman_running_medium_skin_tone": "🏃🏽\u200d♀️", + "woman_scientist": "👩\u200d🔬", + "woman_scientist_dark_skin_tone": "👩🏿\u200d🔬", + "woman_scientist_light_skin_tone": "👩🏻\u200d🔬", + "woman_scientist_medium-dark_skin_tone": "👩🏾\u200d🔬", + "woman_scientist_medium-light_skin_tone": "👩🏼\u200d🔬", + "woman_scientist_medium_skin_tone": "👩🏽\u200d🔬", + "woman_shrugging": "🤷\u200d♀️", + "woman_shrugging_dark_skin_tone": "🤷🏿\u200d♀️", + "woman_shrugging_light_skin_tone": "🤷🏻\u200d♀️", + "woman_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♀️", + "woman_shrugging_medium-light_skin_tone": "🤷🏼\u200d♀️", + "woman_shrugging_medium_skin_tone": "🤷🏽\u200d♀️", + "woman_singer": "👩\u200d🎤", + "woman_singer_dark_skin_tone": "👩🏿\u200d🎤", + "woman_singer_light_skin_tone": "👩🏻\u200d🎤", + "woman_singer_medium-dark_skin_tone": "👩🏾\u200d🎤", + "woman_singer_medium-light_skin_tone": "👩🏼\u200d🎤", + "woman_singer_medium_skin_tone": "👩🏽\u200d🎤", + "woman_student": "👩\u200d🎓", + "woman_student_dark_skin_tone": "👩🏿\u200d🎓", + "woman_student_light_skin_tone": "👩🏻\u200d🎓", + "woman_student_medium-dark_skin_tone": "👩🏾\u200d🎓", + "woman_student_medium-light_skin_tone": "👩🏼\u200d🎓", + "woman_student_medium_skin_tone": "👩🏽\u200d🎓", + "woman_surfing": "🏄\u200d♀️", + "woman_surfing_dark_skin_tone": "🏄🏿\u200d♀️", + "woman_surfing_light_skin_tone": "🏄🏻\u200d♀️", + "woman_surfing_medium-dark_skin_tone": "🏄🏾\u200d♀️", + "woman_surfing_medium-light_skin_tone": "🏄🏼\u200d♀️", + "woman_surfing_medium_skin_tone": "🏄🏽\u200d♀️", + "woman_swimming": "🏊\u200d♀️", + "woman_swimming_dark_skin_tone": "🏊🏿\u200d♀️", + "woman_swimming_light_skin_tone": "🏊🏻\u200d♀️", + "woman_swimming_medium-dark_skin_tone": "🏊🏾\u200d♀️", + "woman_swimming_medium-light_skin_tone": "🏊🏼\u200d♀️", + "woman_swimming_medium_skin_tone": "🏊🏽\u200d♀️", + "woman_teacher": "👩\u200d🏫", + "woman_teacher_dark_skin_tone": "👩🏿\u200d🏫", + "woman_teacher_light_skin_tone": "👩🏻\u200d🏫", + "woman_teacher_medium-dark_skin_tone": "👩🏾\u200d🏫", + "woman_teacher_medium-light_skin_tone": "👩🏼\u200d🏫", + "woman_teacher_medium_skin_tone": "👩🏽\u200d🏫", + "woman_technologist": "👩\u200d💻", + "woman_technologist_dark_skin_tone": "👩🏿\u200d💻", + "woman_technologist_light_skin_tone": "👩🏻\u200d💻", + "woman_technologist_medium-dark_skin_tone": "👩🏾\u200d💻", + "woman_technologist_medium-light_skin_tone": "👩🏼\u200d💻", + "woman_technologist_medium_skin_tone": "👩🏽\u200d💻", + "woman_tipping_hand": "💁\u200d♀️", + "woman_tipping_hand_dark_skin_tone": "💁🏿\u200d♀️", + "woman_tipping_hand_light_skin_tone": "💁🏻\u200d♀️", + "woman_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♀️", + "woman_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♀️", + "woman_tipping_hand_medium_skin_tone": "💁🏽\u200d♀️", + "woman_vampire": "🧛\u200d♀️", + "woman_vampire_dark_skin_tone": "🧛🏿\u200d♀️", + "woman_vampire_light_skin_tone": "🧛🏻\u200d♀️", + 
"woman_vampire_medium-dark_skin_tone": "🧛🏾\u200d♀️", + "woman_vampire_medium-light_skin_tone": "🧛🏼\u200d♀️", + "woman_vampire_medium_skin_tone": "🧛🏽\u200d♀️", + "woman_walking": "🚶\u200d♀️", + "woman_walking_dark_skin_tone": "🚶🏿\u200d♀️", + "woman_walking_light_skin_tone": "🚶🏻\u200d♀️", + "woman_walking_medium-dark_skin_tone": "🚶🏾\u200d♀️", + "woman_walking_medium-light_skin_tone": "🚶🏼\u200d♀️", + "woman_walking_medium_skin_tone": "🚶🏽\u200d♀️", + "woman_wearing_turban": "👳\u200d♀️", + "woman_wearing_turban_dark_skin_tone": "👳🏿\u200d♀️", + "woman_wearing_turban_light_skin_tone": "👳🏻\u200d♀️", + "woman_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♀️", + "woman_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♀️", + "woman_wearing_turban_medium_skin_tone": "👳🏽\u200d♀️", + "woman_with_headscarf": "🧕", + "woman_with_headscarf_dark_skin_tone": "🧕🏿", + "woman_with_headscarf_light_skin_tone": "🧕🏻", + "woman_with_headscarf_medium-dark_skin_tone": "🧕🏾", + "woman_with_headscarf_medium-light_skin_tone": "🧕🏼", + "woman_with_headscarf_medium_skin_tone": "🧕🏽", + "woman_with_probing_cane": "👩\u200d🦯", + "woman_zombie": "🧟\u200d♀️", + "woman’s_boot": "👢", + "woman’s_clothes": "👚", + "woman’s_hat": "👒", + "woman’s_sandal": "👡", + "women_with_bunny_ears": "👯\u200d♀️", + "women_wrestling": "🤼\u200d♀️", + "women’s_room": "🚺", + "woozy_face": "🥴", + "world_map": "🗺", + "worried_face": "😟", + "wrapped_gift": "🎁", + "wrench": "🔧", + "writing_hand": "✍", + "writing_hand_dark_skin_tone": "✍🏿", + "writing_hand_light_skin_tone": "✍🏻", + "writing_hand_medium-dark_skin_tone": "✍🏾", + "writing_hand_medium-light_skin_tone": "✍🏼", + "writing_hand_medium_skin_tone": "✍🏽", + "yarn": "🧶", + "yawning_face": "🥱", + "yellow_circle": "🟡", + "yellow_heart": "💛", + "yellow_square": "🟨", + "yen_banknote": "💴", + "yo-yo": "🪀", + "yin_yang": "☯", + "zany_face": "🤪", + "zebra": "🦓", + "zipper-mouth_face": "🤐", + "zombie": "🧟", + "zzz": "💤", + "åland_islands": "🇦🇽", + "keycap_asterisk": "*⃣", + "keycap_digit_eight": "8⃣", + "keycap_digit_five": "5⃣", + "keycap_digit_four": "4⃣", + "keycap_digit_nine": "9⃣", + "keycap_digit_one": "1⃣", + "keycap_digit_seven": "7⃣", + "keycap_digit_six": "6⃣", + "keycap_digit_three": "3⃣", + "keycap_digit_two": "2⃣", + "keycap_digit_zero": "0⃣", + "keycap_number_sign": "#⃣", + "light_skin_tone": "🏻", + "medium_light_skin_tone": "🏼", + "medium_skin_tone": "🏽", + "medium_dark_skin_tone": "🏾", + "dark_skin_tone": "🏿", + "regional_indicator_symbol_letter_a": "🇦", + "regional_indicator_symbol_letter_b": "🇧", + "regional_indicator_symbol_letter_c": "🇨", + "regional_indicator_symbol_letter_d": "🇩", + "regional_indicator_symbol_letter_e": "🇪", + "regional_indicator_symbol_letter_f": "🇫", + "regional_indicator_symbol_letter_g": "🇬", + "regional_indicator_symbol_letter_h": "🇭", + "regional_indicator_symbol_letter_i": "🇮", + "regional_indicator_symbol_letter_j": "🇯", + "regional_indicator_symbol_letter_k": "🇰", + "regional_indicator_symbol_letter_l": "🇱", + "regional_indicator_symbol_letter_m": "🇲", + "regional_indicator_symbol_letter_n": "🇳", + "regional_indicator_symbol_letter_o": "🇴", + "regional_indicator_symbol_letter_p": "🇵", + "regional_indicator_symbol_letter_q": "🇶", + "regional_indicator_symbol_letter_r": "🇷", + "regional_indicator_symbol_letter_s": "🇸", + "regional_indicator_symbol_letter_t": "🇹", + "regional_indicator_symbol_letter_u": "🇺", + "regional_indicator_symbol_letter_v": "🇻", + "regional_indicator_symbol_letter_w": "🇼", + "regional_indicator_symbol_letter_x": "🇽", + 
"regional_indicator_symbol_letter_y": "🇾", + "regional_indicator_symbol_letter_z": "🇿", + "airplane_arriving": "🛬", + "space_invader": "👾", + "football": "🏈", + "anger": "💢", + "angry": "😠", + "anguished": "😧", + "signal_strength": "📶", + "arrows_counterclockwise": "🔄", + "arrow_heading_down": "⤵", + "arrow_heading_up": "⤴", + "art": "🎨", + "astonished": "😲", + "athletic_shoe": "👟", + "atm": "🏧", + "car": "🚗", + "red_car": "🚗", + "angel": "👼", + "back": "🔙", + "badminton_racquet_and_shuttlecock": "🏸", + "dollar": "💵", + "euro": "💶", + "pound": "💷", + "yen": "💴", + "barber": "💈", + "bath": "🛀", + "bear": "🐻", + "heartbeat": "💓", + "beer": "🍺", + "no_bell": "🔕", + "bento": "🍱", + "bike": "🚲", + "bicyclist": "🚴", + "8ball": "🎱", + "biohazard_sign": "☣", + "birthday": "🎂", + "black_circle_for_record": "⏺", + "clubs": "♣", + "diamonds": "♦", + "arrow_double_down": "⏬", + "hearts": "♥", + "rewind": "⏪", + "black_left__pointing_double_triangle_with_vertical_bar": "⏮", + "arrow_backward": "◀", + "black_medium_small_square": "◾", + "question": "❓", + "fast_forward": "⏩", + "black_right__pointing_double_triangle_with_vertical_bar": "⏭", + "arrow_forward": "▶", + "black_right__pointing_triangle_with_double_vertical_bar": "⏯", + "arrow_right": "➡", + "spades": "♠", + "black_square_for_stop": "⏹", + "sunny": "☀", + "phone": "☎", + "recycle": "♻", + "arrow_double_up": "⏫", + "busstop": "🚏", + "date": "📅", + "flags": "🎏", + "cat2": "🐈", + "joy_cat": "😹", + "smirk_cat": "😼", + "chart_with_downwards_trend": "📉", + "chart_with_upwards_trend": "📈", + "chart": "💹", + "mega": "📣", + "checkered_flag": "🏁", + "accept": "🉑", + "ideograph_advantage": "🉐", + "congratulations": "㊗", + "secret": "㊙", + "m": "Ⓜ", + "city_sunset": "🌆", + "clapper": "🎬", + "clap": "👏", + "beers": "🍻", + "clock830": "🕣", + "clock8": "🕗", + "clock1130": "🕦", + "clock11": "🕚", + "clock530": "🕠", + "clock5": "🕔", + "clock430": "🕟", + "clock4": "🕓", + "clock930": "🕤", + "clock9": "🕘", + "clock130": "🕜", + "clock1": "🕐", + "clock730": "🕢", + "clock7": "🕖", + "clock630": "🕡", + "clock6": "🕕", + "clock1030": "🕥", + "clock10": "🕙", + "clock330": "🕞", + "clock3": "🕒", + "clock1230": "🕧", + "clock12": "🕛", + "clock230": "🕝", + "clock2": "🕑", + "arrows_clockwise": "🔃", + "repeat": "🔁", + "repeat_one": "🔂", + "closed_lock_with_key": "🔐", + "mailbox_closed": "📪", + "mailbox": "📫", + "cloud_with_tornado": "🌪", + "cocktail": "🍸", + "boom": "💥", + "compression": "🗜", + "confounded": "😖", + "confused": "😕", + "rice": "🍚", + "cow2": "🐄", + "cricket_bat_and_ball": "🏏", + "x": "❌", + "cry": "😢", + "curry": "🍛", + "dagger_knife": "🗡", + "dancer": "💃", + "dark_sunglasses": "🕶", + "dash": "💨", + "truck": "🚚", + "derelict_house_building": "🏚", + "diamond_shape_with_a_dot_inside": "💠", + "dart": "🎯", + "disappointed_relieved": "😥", + "disappointed": "😞", + "do_not_litter": "🚯", + "dog2": "🐕", + "flipper": "🐬", + "loop": "➿", + "bangbang": "‼", + "double_vertical_bar": "⏸", + "dove_of_peace": "🕊", + "small_red_triangle_down": "🔻", + "arrow_down_small": "🔽", + "arrow_down": "⬇", + "dromedary_camel": "🐪", + "e__mail": "📧", + "corn": "🌽", + "ear_of_rice": "🌾", + "earth_americas": "🌎", + "earth_asia": "🌏", + "earth_africa": "🌍", + "eight_pointed_black_star": "✴", + "eight_spoked_asterisk": "✳", + "eject_symbol": "⏏", + "bulb": "💡", + "emoji_modifier_fitzpatrick_type__1__2": "🏻", + "emoji_modifier_fitzpatrick_type__3": "🏼", + "emoji_modifier_fitzpatrick_type__4": "🏽", + "emoji_modifier_fitzpatrick_type__5": "🏾", + "emoji_modifier_fitzpatrick_type__6": "🏿", + "end": 
"🔚", + "email": "✉", + "european_castle": "🏰", + "european_post_office": "🏤", + "interrobang": "⁉", + "expressionless": "😑", + "eyeglasses": "👓", + "massage": "💆", + "yum": "😋", + "scream": "😱", + "kissing_heart": "😘", + "sweat": "😓", + "face_with_head__bandage": "🤕", + "triumph": "😤", + "mask": "😷", + "no_good": "🙅", + "ok_woman": "🙆", + "open_mouth": "😮", + "cold_sweat": "😰", + "stuck_out_tongue": "😛", + "stuck_out_tongue_closed_eyes": "😝", + "stuck_out_tongue_winking_eye": "😜", + "joy": "😂", + "no_mouth": "😶", + "santa": "🎅", + "fax": "📠", + "fearful": "😨", + "field_hockey_stick_and_ball": "🏑", + "first_quarter_moon_with_face": "🌛", + "fish_cake": "🍥", + "fishing_pole_and_fish": "🎣", + "facepunch": "👊", + "punch": "👊", + "flag_for_afghanistan": "🇦🇫", + "flag_for_albania": "🇦🇱", + "flag_for_algeria": "🇩🇿", + "flag_for_american_samoa": "🇦🇸", + "flag_for_andorra": "🇦🇩", + "flag_for_angola": "🇦🇴", + "flag_for_anguilla": "🇦🇮", + "flag_for_antarctica": "🇦🇶", + "flag_for_antigua_&_barbuda": "🇦🇬", + "flag_for_argentina": "🇦🇷", + "flag_for_armenia": "🇦🇲", + "flag_for_aruba": "🇦🇼", + "flag_for_ascension_island": "🇦🇨", + "flag_for_australia": "🇦🇺", + "flag_for_austria": "🇦🇹", + "flag_for_azerbaijan": "🇦🇿", + "flag_for_bahamas": "🇧🇸", + "flag_for_bahrain": "🇧🇭", + "flag_for_bangladesh": "🇧🇩", + "flag_for_barbados": "🇧🇧", + "flag_for_belarus": "🇧🇾", + "flag_for_belgium": "🇧🇪", + "flag_for_belize": "🇧🇿", + "flag_for_benin": "🇧🇯", + "flag_for_bermuda": "🇧🇲", + "flag_for_bhutan": "🇧🇹", + "flag_for_bolivia": "🇧🇴", + "flag_for_bosnia_&_herzegovina": "🇧🇦", + "flag_for_botswana": "🇧🇼", + "flag_for_bouvet_island": "🇧🇻", + "flag_for_brazil": "🇧🇷", + "flag_for_british_indian_ocean_territory": "🇮🇴", + "flag_for_british_virgin_islands": "🇻🇬", + "flag_for_brunei": "🇧🇳", + "flag_for_bulgaria": "🇧🇬", + "flag_for_burkina_faso": "🇧🇫", + "flag_for_burundi": "🇧🇮", + "flag_for_cambodia": "🇰🇭", + "flag_for_cameroon": "🇨🇲", + "flag_for_canada": "🇨🇦", + "flag_for_canary_islands": "🇮🇨", + "flag_for_cape_verde": "🇨🇻", + "flag_for_caribbean_netherlands": "🇧🇶", + "flag_for_cayman_islands": "🇰🇾", + "flag_for_central_african_republic": "🇨🇫", + "flag_for_ceuta_&_melilla": "🇪🇦", + "flag_for_chad": "🇹🇩", + "flag_for_chile": "🇨🇱", + "flag_for_china": "🇨🇳", + "flag_for_christmas_island": "🇨🇽", + "flag_for_clipperton_island": "🇨🇵", + "flag_for_cocos__islands": "🇨🇨", + "flag_for_colombia": "🇨🇴", + "flag_for_comoros": "🇰🇲", + "flag_for_congo____brazzaville": "🇨🇬", + "flag_for_congo____kinshasa": "🇨🇩", + "flag_for_cook_islands": "🇨🇰", + "flag_for_costa_rica": "🇨🇷", + "flag_for_croatia": "🇭🇷", + "flag_for_cuba": "🇨🇺", + "flag_for_curaçao": "🇨🇼", + "flag_for_cyprus": "🇨🇾", + "flag_for_czech_republic": "🇨🇿", + "flag_for_côte_d’ivoire": "🇨🇮", + "flag_for_denmark": "🇩🇰", + "flag_for_diego_garcia": "🇩🇬", + "flag_for_djibouti": "🇩🇯", + "flag_for_dominica": "🇩🇲", + "flag_for_dominican_republic": "🇩🇴", + "flag_for_ecuador": "🇪🇨", + "flag_for_egypt": "🇪🇬", + "flag_for_el_salvador": "🇸🇻", + "flag_for_equatorial_guinea": "🇬🇶", + "flag_for_eritrea": "🇪🇷", + "flag_for_estonia": "🇪🇪", + "flag_for_ethiopia": "🇪🇹", + "flag_for_european_union": "🇪🇺", + "flag_for_falkland_islands": "🇫🇰", + "flag_for_faroe_islands": "🇫🇴", + "flag_for_fiji": "🇫🇯", + "flag_for_finland": "🇫🇮", + "flag_for_france": "🇫🇷", + "flag_for_french_guiana": "🇬🇫", + "flag_for_french_polynesia": "🇵🇫", + "flag_for_french_southern_territories": "🇹🇫", + "flag_for_gabon": "🇬🇦", + "flag_for_gambia": "🇬🇲", + "flag_for_georgia": "🇬🇪", + "flag_for_germany": "🇩🇪", + "flag_for_ghana": "🇬🇭", + 
"flag_for_gibraltar": "🇬🇮", + "flag_for_greece": "🇬🇷", + "flag_for_greenland": "🇬🇱", + "flag_for_grenada": "🇬🇩", + "flag_for_guadeloupe": "🇬🇵", + "flag_for_guam": "🇬🇺", + "flag_for_guatemala": "🇬🇹", + "flag_for_guernsey": "🇬🇬", + "flag_for_guinea": "🇬🇳", + "flag_for_guinea__bissau": "🇬🇼", + "flag_for_guyana": "🇬🇾", + "flag_for_haiti": "🇭🇹", + "flag_for_heard_&_mcdonald_islands": "🇭🇲", + "flag_for_honduras": "🇭🇳", + "flag_for_hong_kong": "🇭🇰", + "flag_for_hungary": "🇭🇺", + "flag_for_iceland": "🇮🇸", + "flag_for_india": "🇮🇳", + "flag_for_indonesia": "🇮🇩", + "flag_for_iran": "🇮🇷", + "flag_for_iraq": "🇮🇶", + "flag_for_ireland": "🇮🇪", + "flag_for_isle_of_man": "🇮🇲", + "flag_for_israel": "🇮🇱", + "flag_for_italy": "🇮🇹", + "flag_for_jamaica": "🇯🇲", + "flag_for_japan": "🇯🇵", + "flag_for_jersey": "🇯🇪", + "flag_for_jordan": "🇯🇴", + "flag_for_kazakhstan": "🇰🇿", + "flag_for_kenya": "🇰🇪", + "flag_for_kiribati": "🇰🇮", + "flag_for_kosovo": "🇽🇰", + "flag_for_kuwait": "🇰🇼", + "flag_for_kyrgyzstan": "🇰🇬", + "flag_for_laos": "🇱🇦", + "flag_for_latvia": "🇱🇻", + "flag_for_lebanon": "🇱🇧", + "flag_for_lesotho": "🇱🇸", + "flag_for_liberia": "🇱🇷", + "flag_for_libya": "🇱🇾", + "flag_for_liechtenstein": "🇱🇮", + "flag_for_lithuania": "🇱🇹", + "flag_for_luxembourg": "🇱🇺", + "flag_for_macau": "🇲🇴", + "flag_for_macedonia": "🇲🇰", + "flag_for_madagascar": "🇲🇬", + "flag_for_malawi": "🇲🇼", + "flag_for_malaysia": "🇲🇾", + "flag_for_maldives": "🇲🇻", + "flag_for_mali": "🇲🇱", + "flag_for_malta": "🇲🇹", + "flag_for_marshall_islands": "🇲🇭", + "flag_for_martinique": "🇲🇶", + "flag_for_mauritania": "🇲🇷", + "flag_for_mauritius": "🇲🇺", + "flag_for_mayotte": "🇾🇹", + "flag_for_mexico": "🇲🇽", + "flag_for_micronesia": "🇫🇲", + "flag_for_moldova": "🇲🇩", + "flag_for_monaco": "🇲🇨", + "flag_for_mongolia": "🇲🇳", + "flag_for_montenegro": "🇲🇪", + "flag_for_montserrat": "🇲🇸", + "flag_for_morocco": "🇲🇦", + "flag_for_mozambique": "🇲🇿", + "flag_for_myanmar": "🇲🇲", + "flag_for_namibia": "🇳🇦", + "flag_for_nauru": "🇳🇷", + "flag_for_nepal": "🇳🇵", + "flag_for_netherlands": "🇳🇱", + "flag_for_new_caledonia": "🇳🇨", + "flag_for_new_zealand": "🇳🇿", + "flag_for_nicaragua": "🇳🇮", + "flag_for_niger": "🇳🇪", + "flag_for_nigeria": "🇳🇬", + "flag_for_niue": "🇳🇺", + "flag_for_norfolk_island": "🇳🇫", + "flag_for_north_korea": "🇰🇵", + "flag_for_northern_mariana_islands": "🇲🇵", + "flag_for_norway": "🇳🇴", + "flag_for_oman": "🇴🇲", + "flag_for_pakistan": "🇵🇰", + "flag_for_palau": "🇵🇼", + "flag_for_palestinian_territories": "🇵🇸", + "flag_for_panama": "🇵🇦", + "flag_for_papua_new_guinea": "🇵🇬", + "flag_for_paraguay": "🇵🇾", + "flag_for_peru": "🇵🇪", + "flag_for_philippines": "🇵🇭", + "flag_for_pitcairn_islands": "🇵🇳", + "flag_for_poland": "🇵🇱", + "flag_for_portugal": "🇵🇹", + "flag_for_puerto_rico": "🇵🇷", + "flag_for_qatar": "🇶🇦", + "flag_for_romania": "🇷🇴", + "flag_for_russia": "🇷🇺", + "flag_for_rwanda": "🇷🇼", + "flag_for_réunion": "🇷🇪", + "flag_for_samoa": "🇼🇸", + "flag_for_san_marino": "🇸🇲", + "flag_for_saudi_arabia": "🇸🇦", + "flag_for_senegal": "🇸🇳", + "flag_for_serbia": "🇷🇸", + "flag_for_seychelles": "🇸🇨", + "flag_for_sierra_leone": "🇸🇱", + "flag_for_singapore": "🇸🇬", + "flag_for_sint_maarten": "🇸🇽", + "flag_for_slovakia": "🇸🇰", + "flag_for_slovenia": "🇸🇮", + "flag_for_solomon_islands": "🇸🇧", + "flag_for_somalia": "🇸🇴", + "flag_for_south_africa": "🇿🇦", + "flag_for_south_georgia_&_south_sandwich_islands": "🇬🇸", + "flag_for_south_korea": "🇰🇷", + "flag_for_south_sudan": "🇸🇸", + "flag_for_spain": "🇪🇸", + "flag_for_sri_lanka": "🇱🇰", + "flag_for_st._barthélemy": "🇧🇱", + 
"flag_for_st._helena": "🇸🇭", + "flag_for_st._kitts_&_nevis": "🇰🇳", + "flag_for_st._lucia": "🇱🇨", + "flag_for_st._martin": "🇲🇫", + "flag_for_st._pierre_&_miquelon": "🇵🇲", + "flag_for_st._vincent_&_grenadines": "🇻🇨", + "flag_for_sudan": "🇸🇩", + "flag_for_suriname": "🇸🇷", + "flag_for_svalbard_&_jan_mayen": "🇸🇯", + "flag_for_swaziland": "🇸🇿", + "flag_for_sweden": "🇸🇪", + "flag_for_switzerland": "🇨🇭", + "flag_for_syria": "🇸🇾", + "flag_for_são_tomé_&_príncipe": "🇸🇹", + "flag_for_taiwan": "🇹🇼", + "flag_for_tajikistan": "🇹🇯", + "flag_for_tanzania": "🇹🇿", + "flag_for_thailand": "🇹🇭", + "flag_for_timor__leste": "🇹🇱", + "flag_for_togo": "🇹🇬", + "flag_for_tokelau": "🇹🇰", + "flag_for_tonga": "🇹🇴", + "flag_for_trinidad_&_tobago": "🇹🇹", + "flag_for_tristan_da_cunha": "🇹🇦", + "flag_for_tunisia": "🇹🇳", + "flag_for_turkey": "🇹🇷", + "flag_for_turkmenistan": "🇹🇲", + "flag_for_turks_&_caicos_islands": "🇹🇨", + "flag_for_tuvalu": "🇹🇻", + "flag_for_u.s._outlying_islands": "🇺🇲", + "flag_for_u.s._virgin_islands": "🇻🇮", + "flag_for_uganda": "🇺🇬", + "flag_for_ukraine": "🇺🇦", + "flag_for_united_arab_emirates": "🇦🇪", + "flag_for_united_kingdom": "🇬🇧", + "flag_for_united_states": "🇺🇸", + "flag_for_uruguay": "🇺🇾", + "flag_for_uzbekistan": "🇺🇿", + "flag_for_vanuatu": "🇻🇺", + "flag_for_vatican_city": "🇻🇦", + "flag_for_venezuela": "🇻🇪", + "flag_for_vietnam": "🇻🇳", + "flag_for_wallis_&_futuna": "🇼🇫", + "flag_for_western_sahara": "🇪🇭", + "flag_for_yemen": "🇾🇪", + "flag_for_zambia": "🇿🇲", + "flag_for_zimbabwe": "🇿🇼", + "flag_for_åland_islands": "🇦🇽", + "golf": "⛳", + "fleur__de__lis": "⚜", + "muscle": "💪", + "flushed": "😳", + "frame_with_picture": "🖼", + "fries": "🍟", + "frog": "🐸", + "hatched_chick": "🐥", + "frowning": "😦", + "fuelpump": "⛽", + "full_moon_with_face": "🌝", + "gem": "💎", + "star2": "🌟", + "golfer": "🏌", + "mortar_board": "🎓", + "grimacing": "😬", + "smile_cat": "😸", + "grinning": "😀", + "grin": "😁", + "heartpulse": "💗", + "guardsman": "💂", + "haircut": "💇", + "hamster": "🐹", + "raising_hand": "🙋", + "headphones": "🎧", + "hear_no_evil": "🙉", + "cupid": "💘", + "gift_heart": "💝", + "heart": "❤", + "exclamation": "❗", + "heavy_exclamation_mark": "❗", + "heavy_heart_exclamation_mark_ornament": "❣", + "o": "⭕", + "helm_symbol": "⎈", + "helmet_with_white_cross": "⛑", + "high_heel": "👠", + "bullettrain_side": "🚄", + "bullettrain_front": "🚅", + "high_brightness": "🔆", + "zap": "⚡", + "hocho": "🔪", + "knife": "🔪", + "bee": "🐝", + "traffic_light": "🚥", + "racehorse": "🐎", + "coffee": "☕", + "hotsprings": "♨", + "hourglass": "⌛", + "hourglass_flowing_sand": "⏳", + "house_buildings": "🏘", + "100": "💯", + "hushed": "😯", + "ice_hockey_stick_and_puck": "🏒", + "imp": "👿", + "information_desk_person": "💁", + "information_source": "ℹ", + "capital_abcd": "🔠", + "abc": "🔤", + "abcd": "🔡", + "1234": "🔢", + "symbols": "🔣", + "izakaya_lantern": "🏮", + "lantern": "🏮", + "jack_o_lantern": "🎃", + "dolls": "🎎", + "japanese_goblin": "👺", + "japanese_ogre": "👹", + "beginner": "🔰", + "zero": "0️⃣", + "one": "1️⃣", + "ten": "🔟", + "two": "2️⃣", + "three": "3️⃣", + "four": "4️⃣", + "five": "5️⃣", + "six": "6️⃣", + "seven": "7️⃣", + "eight": "8️⃣", + "nine": "9️⃣", + "couplekiss": "💏", + "kissing_cat": "😽", + "kissing": "😗", + "kissing_closed_eyes": "😚", + "kissing_smiling_eyes": "😙", + "beetle": "🐞", + "large_blue_circle": "🔵", + "last_quarter_moon_with_face": "🌜", + "leaves": "🍃", + "mag": "🔍", + "left_right_arrow": "↔", + "leftwards_arrow_with_hook": "↩", + "arrow_left": "⬅", + "lock": "🔒", + "lock_with_ink_pen": "🔏", + "sob": "😭", + 
"low_brightness": "🔅", + "lower_left_ballpoint_pen": "🖊", + "lower_left_crayon": "🖍", + "lower_left_fountain_pen": "🖋", + "lower_left_paintbrush": "🖌", + "mahjong": "🀄", + "couple": "👫", + "man_in_business_suit_levitating": "🕴", + "man_with_gua_pi_mao": "👲", + "man_with_turban": "👳", + "mans_shoe": "👞", + "shoe": "👞", + "menorah_with_nine_branches": "🕎", + "mens": "🚹", + "minidisc": "💽", + "iphone": "📱", + "calling": "📲", + "money__mouth_face": "🤑", + "moneybag": "💰", + "rice_scene": "🎑", + "mountain_bicyclist": "🚵", + "mouse2": "🐁", + "lips": "👄", + "moyai": "🗿", + "notes": "🎶", + "nail_care": "💅", + "ab": "🆎", + "negative_squared_cross_mark": "❎", + "a": "🅰", + "b": "🅱", + "o2": "🅾", + "parking": "🅿", + "new_moon_with_face": "🌚", + "no_entry_sign": "🚫", + "underage": "🔞", + "non__potable_water": "🚱", + "arrow_upper_right": "↗", + "arrow_upper_left": "↖", + "office": "🏢", + "older_man": "👴", + "older_woman": "👵", + "om_symbol": "🕉", + "on": "🔛", + "book": "📖", + "unlock": "🔓", + "mailbox_with_no_mail": "📭", + "mailbox_with_mail": "📬", + "cd": "💿", + "tada": "🎉", + "feet": "🐾", + "walking": "🚶", + "pencil2": "✏", + "pensive": "😔", + "persevere": "😣", + "bow": "🙇", + "raised_hands": "🙌", + "person_with_ball": "⛹", + "person_with_blond_hair": "👱", + "pray": "🙏", + "person_with_pouting_face": "🙎", + "computer": "💻", + "pig2": "🐖", + "hankey": "💩", + "poop": "💩", + "shit": "💩", + "bamboo": "🎍", + "gun": "🔫", + "black_joker": "🃏", + "rotating_light": "🚨", + "cop": "👮", + "stew": "🍲", + "pouch": "👝", + "pouting_cat": "😾", + "rage": "😡", + "put_litter_in_its_place": "🚮", + "rabbit2": "🐇", + "racing_motorcycle": "🏍", + "radioactive_sign": "☢", + "fist": "✊", + "hand": "✋", + "raised_hand_with_fingers_splayed": "🖐", + "raised_hand_with_part_between_middle_and_ring_fingers": "🖖", + "blue_car": "🚙", + "apple": "🍎", + "relieved": "😌", + "reversed_hand_with_middle_finger_extended": "🖕", + "mag_right": "🔎", + "arrow_right_hook": "↪", + "sweet_potato": "🍠", + "robot": "🤖", + "rolled__up_newspaper": "🗞", + "rowboat": "🚣", + "runner": "🏃", + "running": "🏃", + "running_shirt_with_sash": "🎽", + "boat": "⛵", + "scales": "⚖", + "school_satchel": "🎒", + "scorpius": "♏", + "see_no_evil": "🙈", + "sheep": "🐑", + "stars": "🌠", + "cake": "🍰", + "six_pointed_star": "🔯", + "ski": "🎿", + "sleeping_accommodation": "🛌", + "sleeping": "😴", + "sleepy": "😪", + "sleuth_or_spy": "🕵", + "heart_eyes_cat": "😻", + "smiley_cat": "😺", + "innocent": "😇", + "heart_eyes": "😍", + "smiling_imp": "😈", + "smiley": "😃", + "sweat_smile": "😅", + "smile": "😄", + "laughing": "😆", + "satisfied": "😆", + "blush": "😊", + "smirk": "😏", + "smoking": "🚬", + "snow_capped_mountain": "🏔", + "soccer": "⚽", + "icecream": "🍦", + "soon": "🔜", + "arrow_lower_right": "↘", + "arrow_lower_left": "↙", + "speak_no_evil": "🙊", + "speaker": "🔈", + "mute": "🔇", + "sound": "🔉", + "loud_sound": "🔊", + "speaking_head_in_silhouette": "🗣", + "spiral_calendar_pad": "🗓", + "spiral_note_pad": "🗒", + "shell": "🐚", + "sweat_drops": "💦", + "u5272": "🈹", + "u5408": "🈴", + "u55b6": "🈺", + "u6307": "🈯", + "u6708": "🈷", + "u6709": "🈶", + "u6e80": "🈵", + "u7121": "🈚", + "u7533": "🈸", + "u7981": "🈲", + "u7a7a": "🈳", + "cl": "🆑", + "cool": "🆒", + "free": "🆓", + "id": "🆔", + "koko": "🈁", + "sa": "🈂", + "new": "🆕", + "ng": "🆖", + "ok": "🆗", + "sos": "🆘", + "up": "🆙", + "vs": "🆚", + "steam_locomotive": "🚂", + "ramen": "🍜", + "partly_sunny": "⛅", + "city_sunrise": "🌇", + "surfer": "🏄", + "swimmer": "🏊", + "shirt": "👕", + "tshirt": "👕", + "table_tennis_paddle_and_ball": "🏓", + "tea": "🍵", 
+ "tv": "📺", + "three_button_mouse": "🖱", + "+1": "👍", + "thumbsup": "👍", + "__1": "👎", + "-1": "👎", + "thumbsdown": "👎", + "thunder_cloud_and_rain": "⛈", + "tiger2": "🐅", + "tophat": "🎩", + "top": "🔝", + "tm": "™", + "train2": "🚆", + "triangular_flag_on_post": "🚩", + "trident": "🔱", + "twisted_rightwards_arrows": "🔀", + "unamused": "😒", + "small_red_triangle": "🔺", + "arrow_up_small": "🔼", + "arrow_up_down": "↕", + "upside__down_face": "🙃", + "arrow_up": "⬆", + "v": "✌", + "vhs": "📼", + "wc": "🚾", + "ocean": "🌊", + "waving_black_flag": "🏴", + "wave": "👋", + "waving_white_flag": "🏳", + "moon": "🌔", + "scream_cat": "🙀", + "weary": "😩", + "weight_lifter": "🏋", + "whale2": "🐋", + "wheelchair": "♿", + "point_down": "👇", + "grey_exclamation": "❕", + "white_frowning_face": "☹", + "white_check_mark": "✅", + "point_left": "👈", + "white_medium_small_square": "◽", + "star": "⭐", + "grey_question": "❔", + "point_right": "👉", + "relaxed": "☺", + "white_sun_behind_cloud": "🌥", + "white_sun_behind_cloud_with_rain": "🌦", + "white_sun_with_small_cloud": "🌤", + "point_up_2": "👆", + "point_up": "☝", + "wind_blowing_face": "🌬", + "wink": "😉", + "wolf": "🐺", + "dancers": "👯", + "boot": "👢", + "womans_clothes": "👚", + "womans_hat": "👒", + "sandal": "👡", + "womens": "🚺", + "worried": "😟", + "gift": "🎁", + "zipper__mouth_face": "🤐", + "regional_indicator_a": "🇦", + "regional_indicator_b": "🇧", + "regional_indicator_c": "🇨", + "regional_indicator_d": "🇩", + "regional_indicator_e": "🇪", + "regional_indicator_f": "🇫", + "regional_indicator_g": "🇬", + "regional_indicator_h": "🇭", + "regional_indicator_i": "🇮", + "regional_indicator_j": "🇯", + "regional_indicator_k": "🇰", + "regional_indicator_l": "🇱", + "regional_indicator_m": "🇲", + "regional_indicator_n": "🇳", + "regional_indicator_o": "🇴", + "regional_indicator_p": "🇵", + "regional_indicator_q": "🇶", + "regional_indicator_r": "🇷", + "regional_indicator_s": "🇸", + "regional_indicator_t": "🇹", + "regional_indicator_u": "🇺", + "regional_indicator_v": "🇻", + "regional_indicator_w": "🇼", + "regional_indicator_x": "🇽", + "regional_indicator_y": "🇾", + "regional_indicator_z": "🇿", +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py new file mode 100644 index 0000000..bb2cafa --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py @@ -0,0 +1,32 @@ +from typing import Callable, Match, Optional +import re + +from ._emoji_codes import EMOJI + + +_ReStringMatch = Match[str] # regex match object +_ReSubCallable = Callable[[_ReStringMatch], str] # Callable invoked by re.sub +_EmojiSubMethod = Callable[[_ReSubCallable, str], str] # Sub method of a compiled re + + +def _emoji_replace( + text: str, + default_variant: Optional[str] = None, + _emoji_sub: _EmojiSubMethod = re.compile(r"(:(\S*?)(?:(?:\-)(emoji|text))?:)").sub, +) -> str: + """Replace emoji code in text.""" + get_emoji = EMOJI.__getitem__ + variants = {"text": "\uFE0E", "emoji": "\uFE0F"} + get_variant = variants.get + default_variant_code = variants.get(default_variant, "") if default_variant else "" + + def do_replace(match: Match[str]) -> str: + emoji_code, emoji_name, variant = match.groups() + try: + return get_emoji(emoji_name.lower()) + get_variant( + variant, default_variant_code + ) + except KeyError: + return emoji_code + + return _emoji_sub(do_replace, text) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py 
b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py new file mode 100644 index 0000000..094d2dc --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py @@ -0,0 +1,76 @@ +CONSOLE_HTML_FORMAT = """\ + + + + + + + +
{code}
+ + +""" + +CONSOLE_SVG_FORMAT = """\ + + + + + + + + + {lines} + + + {chrome} + + {backgrounds} + + {matrix} + + + +""" + +_SVG_FONT_FAMILY = "Rich Fira Code" +_SVG_CLASSES_PREFIX = "rich-svg" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py new file mode 100644 index 0000000..cbd6da9 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py @@ -0,0 +1,10 @@ +from typing import Any + + +def load_ipython_extension(ip: Any) -> None: # pragma: no cover + # prevent circular import + from pip._vendor.rich.pretty import install + from pip._vendor.rich.traceback import install as tr_install + + install() + tr_install() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py new file mode 100644 index 0000000..b17ee65 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import IO, Callable + + +def get_fileno(file_like: IO[str]) -> int | None: + """Get fileno() from a file, accounting for poorly implemented file-like objects. + + Args: + file_like (IO): A file-like object. + + Returns: + int | None: The result of fileno if available, or None if operation failed. + """ + fileno: Callable[[], int] | None = getattr(file_like, "fileno", None) + if fileno is not None: + try: + return fileno() + except Exception: + # `fileno` is documented as potentially raising a OSError + # Alas, from the issues, there are so many poorly implemented file-like objects, + # that `fileno()` can raise just about anything. + return None + return None diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py new file mode 100644 index 0000000..30446ce --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py @@ -0,0 +1,270 @@ +from __future__ import absolute_import + +import inspect +from inspect import cleandoc, getdoc, getfile, isclass, ismodule, signature +from typing import Any, Collection, Iterable, Optional, Tuple, Type, Union + +from .console import Group, RenderableType +from .control import escape_control_codes +from .highlighter import ReprHighlighter +from .jupyter import JupyterMixin +from .panel import Panel +from .pretty import Pretty +from .table import Table +from .text import Text, TextType + + +def _first_paragraph(doc: str) -> str: + """Get the first paragraph from a docstring.""" + paragraph, _, _ = doc.partition("\n\n") + return paragraph + + +class Inspect(JupyterMixin): + """A renderable to inspect any Python Object. + + Args: + obj (Any): An object to inspect. + title (str, optional): Title to display over inspect result, or None use type. Defaults to None. + help (bool, optional): Show full help text rather than just first paragraph. Defaults to False. + methods (bool, optional): Enable inspection of callables. Defaults to False. + docs (bool, optional): Also render doc strings. Defaults to True. + private (bool, optional): Show private attributes (beginning with underscore). Defaults to False. + dunder (bool, optional): Show attributes starting with double underscore. Defaults to False. + sort (bool, optional): Sort attributes alphabetically. Defaults to True. + all (bool, optional): Show all attributes. Defaults to False. + value (bool, optional): Pretty print value of object. 
Defaults to True. + """ + + def __init__( + self, + obj: Any, + *, + title: Optional[TextType] = None, + help: bool = False, + methods: bool = False, + docs: bool = True, + private: bool = False, + dunder: bool = False, + sort: bool = True, + all: bool = True, + value: bool = True, + ) -> None: + self.highlighter = ReprHighlighter() + self.obj = obj + self.title = title or self._make_title(obj) + if all: + methods = private = dunder = True + self.help = help + self.methods = methods + self.docs = docs or help + self.private = private or dunder + self.dunder = dunder + self.sort = sort + self.value = value + + def _make_title(self, obj: Any) -> Text: + """Make a default title.""" + title_str = ( + str(obj) + if (isclass(obj) or callable(obj) or ismodule(obj)) + else str(type(obj)) + ) + title_text = self.highlighter(title_str) + return title_text + + def __rich__(self) -> Panel: + return Panel.fit( + Group(*self._render()), + title=self.title, + border_style="scope.border", + padding=(0, 1), + ) + + def _get_signature(self, name: str, obj: Any) -> Optional[Text]: + """Get a signature for a callable.""" + try: + _signature = str(signature(obj)) + ":" + except ValueError: + _signature = "(...)" + except TypeError: + return None + + source_filename: Optional[str] = None + try: + source_filename = getfile(obj) + except (OSError, TypeError): + # OSError is raised if obj has no source file, e.g. when defined in REPL. + pass + + callable_name = Text(name, style="inspect.callable") + if source_filename: + callable_name.stylize(f"link file://{source_filename}") + signature_text = self.highlighter(_signature) + + qualname = name or getattr(obj, "__qualname__", name) + + # If obj is a module, there may be classes (which are callable) to display + if inspect.isclass(obj): + prefix = "class" + elif inspect.iscoroutinefunction(obj): + prefix = "async def" + else: + prefix = "def" + + qual_signature = Text.assemble( + (f"{prefix} ", f"inspect.{prefix.replace(' ', '_')}"), + (qualname, "inspect.callable"), + signature_text, + ) + + return qual_signature + + def _render(self) -> Iterable[RenderableType]: + """Render object.""" + + def sort_items(item: Tuple[str, Any]) -> Tuple[bool, str]: + key, (_error, value) = item + return (callable(value), key.strip("_").lower()) + + def safe_getattr(attr_name: str) -> Tuple[Any, Any]: + """Get attribute or any exception.""" + try: + return (None, getattr(obj, attr_name)) + except Exception as error: + return (error, None) + + obj = self.obj + keys = dir(obj) + total_items = len(keys) + if not self.dunder: + keys = [key for key in keys if not key.startswith("__")] + if not self.private: + keys = [key for key in keys if not key.startswith("_")] + not_shown_count = total_items - len(keys) + items = [(key, safe_getattr(key)) for key in keys] + if self.sort: + items.sort(key=sort_items) + + items_table = Table.grid(padding=(0, 1), expand=False) + items_table.add_column(justify="right") + add_row = items_table.add_row + highlighter = self.highlighter + + if callable(obj): + signature = self._get_signature("", obj) + if signature is not None: + yield signature + yield "" + + if self.docs: + _doc = self._get_formatted_doc(obj) + if _doc is not None: + doc_text = Text(_doc, style="inspect.help") + doc_text = highlighter(doc_text) + yield doc_text + yield "" + + if self.value and not (isclass(obj) or callable(obj) or ismodule(obj)): + yield Panel( + Pretty(obj, indent_guides=True, max_length=10, max_string=60), + border_style="inspect.value.border", + ) + yield "" + + for key, 
(error, value) in items: + key_text = Text.assemble( + ( + key, + "inspect.attr.dunder" if key.startswith("__") else "inspect.attr", + ), + (" =", "inspect.equals"), + ) + if error is not None: + warning = key_text.copy() + warning.stylize("inspect.error") + add_row(warning, highlighter(repr(error))) + continue + + if callable(value): + if not self.methods: + continue + + _signature_text = self._get_signature(key, value) + if _signature_text is None: + add_row(key_text, Pretty(value, highlighter=highlighter)) + else: + if self.docs: + docs = self._get_formatted_doc(value) + if docs is not None: + _signature_text.append("\n" if "\n" in docs else " ") + doc = highlighter(docs) + doc.stylize("inspect.doc") + _signature_text.append(doc) + + add_row(key_text, _signature_text) + else: + add_row(key_text, Pretty(value, highlighter=highlighter)) + if items_table.row_count: + yield items_table + elif not_shown_count: + yield Text.from_markup( + f"[b cyan]{not_shown_count}[/][i] attribute(s) not shown.[/i] " + f"Run [b][magenta]inspect[/]([not b]inspect[/])[/b] for options." + ) + + def _get_formatted_doc(self, object_: Any) -> Optional[str]: + """ + Extract the docstring of an object, process it, and return it. + The processing consists of cleaning up the docstring's indentation, + taking only its first paragraph if `self.help` is not True, + and escaping its control codes. + + Args: + object_ (Any): the object to get the docstring from. + + Returns: + Optional[str]: the processed docstring, or None if no docstring was found. + """ + docs = getdoc(object_) + if docs is None: + return None + docs = cleandoc(docs).strip() + if not self.help: + docs = _first_paragraph(docs) + return escape_control_codes(docs) + + +def get_object_types_mro(obj: Union[object, Type[Any]]) -> Tuple[type, ...]: + """Returns the MRO of an object's class, or of the object itself if it's a class.""" + if not hasattr(obj, "__mro__"): + # N.B. we cannot use `if type(obj) is type` here because it doesn't work with + # some types of classes, such as the ones that use abc.ABCMeta. + obj = type(obj) + return getattr(obj, "__mro__", ()) + + +def get_object_types_mro_as_strings(obj: object) -> Collection[str]: + """ + Returns the MRO of an object's class as fully qualified names, or of the object itself if it's a class. + + Examples: + `get_object_types_mro_as_strings(JSONDecoder)` will return `['json.decoder.JSONDecoder', 'builtins.object']` + """ + return [ + f'{getattr(type_, "__module__", "")}.{getattr(type_, "__qualname__", "")}' + for type_ in get_object_types_mro(obj) + ] + + +def is_object_one_of_types( + obj: object, fully_qualified_types_names: Collection[str] +) -> bool: + """ + Returns `True` if the given object's class (or the object itself, if it's a class) has one of the + fully qualified names in its MRO. + """ + for type_name in get_object_types_mro_as_strings(obj): + if type_name in fully_qualified_types_names: + return True + return False
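A minimal sketch of how the three MRO helpers above behave (not part of the vendored diff; it mirrors the docstring example and assumes the vendored import path):

from json.decoder import JSONDecoder

from pip._vendor.rich._inspect import (
    get_object_types_mro_as_strings,
    is_object_one_of_types,
)

# Accepts a class directly or an instance (the instance's class is used).
print(get_object_types_mro_as_strings(JSONDecoder))
# ['json.decoder.JSONDecoder', 'builtins.object']

# 'builtins.object' is in every class's MRO, so this is True.
print(is_object_one_of_types(JSONDecoder(), {"builtins.object"}))  # True
print(is_object_one_of_types(JSONDecoder(), {"decimal.Decimal"}))  # False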
+ """ + for type_name in get_object_types_mro_as_strings(obj): + if type_name in fully_qualified_types_names: + return True + return False diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py new file mode 100644 index 0000000..fc16c84 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py @@ -0,0 +1,94 @@ +from datetime import datetime +from typing import Iterable, List, Optional, TYPE_CHECKING, Union, Callable + + +from .text import Text, TextType + +if TYPE_CHECKING: + from .console import Console, ConsoleRenderable, RenderableType + from .table import Table + +FormatTimeCallable = Callable[[datetime], Text] + + +class LogRender: + def __init__( + self, + show_time: bool = True, + show_level: bool = False, + show_path: bool = True, + time_format: Union[str, FormatTimeCallable] = "[%x %X]", + omit_repeated_times: bool = True, + level_width: Optional[int] = 8, + ) -> None: + self.show_time = show_time + self.show_level = show_level + self.show_path = show_path + self.time_format = time_format + self.omit_repeated_times = omit_repeated_times + self.level_width = level_width + self._last_time: Optional[Text] = None + + def __call__( + self, + console: "Console", + renderables: Iterable["ConsoleRenderable"], + log_time: Optional[datetime] = None, + time_format: Optional[Union[str, FormatTimeCallable]] = None, + level: TextType = "", + path: Optional[str] = None, + line_no: Optional[int] = None, + link_path: Optional[str] = None, + ) -> "Table": + from .containers import Renderables + from .table import Table + + output = Table.grid(padding=(0, 1)) + output.expand = True + if self.show_time: + output.add_column(style="log.time") + if self.show_level: + output.add_column(style="log.level", width=self.level_width) + output.add_column(ratio=1, style="log.message", overflow="fold") + if self.show_path and path: + output.add_column(style="log.path") + row: List["RenderableType"] = [] + if self.show_time: + log_time = log_time or console.get_datetime() + time_format = time_format or self.time_format + if callable(time_format): + log_time_display = time_format(log_time) + else: + log_time_display = Text(log_time.strftime(time_format)) + if log_time_display == self._last_time and self.omit_repeated_times: + row.append(Text(" " * len(log_time_display))) + else: + row.append(log_time_display) + self._last_time = log_time_display + if self.show_level: + row.append(level) + + row.append(Renderables(renderables)) + if self.show_path and path: + path_text = Text() + path_text.append( + path, style=f"link file://{link_path}" if link_path else "" + ) + if line_no: + path_text.append(":") + path_text.append( + f"{line_no}", + style=f"link file://{link_path}#{line_no}" if link_path else "", + ) + row.append(path_text) + + output.add_row(*row) + return output + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + c = Console() + c.print("[on blue]Hello", justify="right") + c.log("[on blue]hello", justify="right") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py new file mode 100644 index 0000000..01c6caf --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py @@ -0,0 +1,43 @@ +from typing import Iterable, Tuple, TypeVar + +T = TypeVar("T") + + +def loop_first(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and 
generate a tuple with a flag for first value.""" + iter_values = iter(values) + try: + value = next(iter_values) + except StopIteration: + return + yield True, value + for value in iter_values: + yield False, value + + +def loop_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate a tuple with a flag for last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + for value in iter_values: + yield False, previous_value + previous_value = value + yield True, previous_value + + +def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]: + """Iterate and generate a tuple with a flag for first and last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + first = True + for value in iter_values: + yield first, False, previous_value + first = False + previous_value = value + yield first, True, previous_value diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py new file mode 100644 index 0000000..b659673 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py @@ -0,0 +1,69 @@ +from types import TracebackType +from typing import IO, Iterable, Iterator, List, Optional, Type + + +class NullFile(IO[str]): + def close(self) -> None: + pass + + def isatty(self) -> bool: + return False + + def read(self, __n: int = 1) -> str: + return "" + + def readable(self) -> bool: + return False + + def readline(self, __limit: int = 1) -> str: + return "" + + def readlines(self, __hint: int = 1) -> List[str]: + return [] + + def seek(self, __offset: int, __whence: int = 1) -> int: + return 0 + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + return 0 + + def truncate(self, __size: Optional[int] = 1) -> int: + return 0 + + def writable(self) -> bool: + return False + + def writelines(self, __lines: Iterable[str]) -> None: + pass + + def __next__(self) -> str: + return "" + + def __iter__(self) -> Iterator[str]: + return iter([""]) + + def __enter__(self) -> IO[str]: + pass + + def __exit__( + self, + __t: Optional[Type[BaseException]], + __value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> None: + pass + + def write(self, text: str) -> int: + return 0 + + def flush(self) -> None: + pass + + def fileno(self) -> int: + return -1 + + +NULL_FILE = NullFile() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py new file mode 100644 index 0000000..3c748d3 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py @@ -0,0 +1,309 @@ +from .palette import Palette + + +# Taken from https://en.wikipedia.org/wiki/ANSI_escape_code (Windows 10 column) +WINDOWS_PALETTE = Palette( + [ + (12, 12, 12), + (197, 15, 31), + (19, 161, 14), + (193, 156, 0), + (0, 55, 218), + (136, 23, 152), + (58, 150, 221), + (204, 204, 204), + (118, 118, 118), + (231, 72, 86), + (22, 198, 12), + (249, 241, 165), + (59, 120, 255), + (180, 0, 158), + (97, 214, 214), + (242, 242, 242), + ] +) + +# # The standard ansi colors (including bright variants) +STANDARD_PALETTE = Palette( + [ + (0, 0, 0), + (170, 0, 0), + (0, 170, 0), + (170, 85, 0), + (0, 0, 170), + (170, 0, 170), + (0, 170, 170), + (170, 170, 170), + (85, 85, 85), + (255, 85, 85), + (85, 255, 85), + (255, 255, 85), + (85, 85, 255), + 
(255, 85, 255), + (85, 255, 255), + (255, 255, 255), + ] +) + + +# The 256 color palette +EIGHT_BIT_PALETTE = Palette( + [ + (0, 0, 0), + (128, 0, 0), + (0, 128, 0), + (128, 128, 0), + (0, 0, 128), + (128, 0, 128), + (0, 128, 128), + (192, 192, 192), + (128, 128, 128), + (255, 0, 0), + (0, 255, 0), + (255, 255, 0), + (0, 0, 255), + (255, 0, 255), + (0, 255, 255), + (255, 255, 255), + (0, 0, 0), + (0, 0, 95), + (0, 0, 135), + (0, 0, 175), + (0, 0, 215), + (0, 0, 255), + (0, 95, 0), + (0, 95, 95), + (0, 95, 135), + (0, 95, 175), + (0, 95, 215), + (0, 95, 255), + (0, 135, 0), + (0, 135, 95), + (0, 135, 135), + (0, 135, 175), + (0, 135, 215), + (0, 135, 255), + (0, 175, 0), + (0, 175, 95), + (0, 175, 135), + (0, 175, 175), + (0, 175, 215), + (0, 175, 255), + (0, 215, 0), + (0, 215, 95), + (0, 215, 135), + (0, 215, 175), + (0, 215, 215), + (0, 215, 255), + (0, 255, 0), + (0, 255, 95), + (0, 255, 135), + (0, 255, 175), + (0, 255, 215), + (0, 255, 255), + (95, 0, 0), + (95, 0, 95), + (95, 0, 135), + (95, 0, 175), + (95, 0, 215), + (95, 0, 255), + (95, 95, 0), + (95, 95, 95), + (95, 95, 135), + (95, 95, 175), + (95, 95, 215), + (95, 95, 255), + (95, 135, 0), + (95, 135, 95), + (95, 135, 135), + (95, 135, 175), + (95, 135, 215), + (95, 135, 255), + (95, 175, 0), + (95, 175, 95), + (95, 175, 135), + (95, 175, 175), + (95, 175, 215), + (95, 175, 255), + (95, 215, 0), + (95, 215, 95), + (95, 215, 135), + (95, 215, 175), + (95, 215, 215), + (95, 215, 255), + (95, 255, 0), + (95, 255, 95), + (95, 255, 135), + (95, 255, 175), + (95, 255, 215), + (95, 255, 255), + (135, 0, 0), + (135, 0, 95), + (135, 0, 135), + (135, 0, 175), + (135, 0, 215), + (135, 0, 255), + (135, 95, 0), + (135, 95, 95), + (135, 95, 135), + (135, 95, 175), + (135, 95, 215), + (135, 95, 255), + (135, 135, 0), + (135, 135, 95), + (135, 135, 135), + (135, 135, 175), + (135, 135, 215), + (135, 135, 255), + (135, 175, 0), + (135, 175, 95), + (135, 175, 135), + (135, 175, 175), + (135, 175, 215), + (135, 175, 255), + (135, 215, 0), + (135, 215, 95), + (135, 215, 135), + (135, 215, 175), + (135, 215, 215), + (135, 215, 255), + (135, 255, 0), + (135, 255, 95), + (135, 255, 135), + (135, 255, 175), + (135, 255, 215), + (135, 255, 255), + (175, 0, 0), + (175, 0, 95), + (175, 0, 135), + (175, 0, 175), + (175, 0, 215), + (175, 0, 255), + (175, 95, 0), + (175, 95, 95), + (175, 95, 135), + (175, 95, 175), + (175, 95, 215), + (175, 95, 255), + (175, 135, 0), + (175, 135, 95), + (175, 135, 135), + (175, 135, 175), + (175, 135, 215), + (175, 135, 255), + (175, 175, 0), + (175, 175, 95), + (175, 175, 135), + (175, 175, 175), + (175, 175, 215), + (175, 175, 255), + (175, 215, 0), + (175, 215, 95), + (175, 215, 135), + (175, 215, 175), + (175, 215, 215), + (175, 215, 255), + (175, 255, 0), + (175, 255, 95), + (175, 255, 135), + (175, 255, 175), + (175, 255, 215), + (175, 255, 255), + (215, 0, 0), + (215, 0, 95), + (215, 0, 135), + (215, 0, 175), + (215, 0, 215), + (215, 0, 255), + (215, 95, 0), + (215, 95, 95), + (215, 95, 135), + (215, 95, 175), + (215, 95, 215), + (215, 95, 255), + (215, 135, 0), + (215, 135, 95), + (215, 135, 135), + (215, 135, 175), + (215, 135, 215), + (215, 135, 255), + (215, 175, 0), + (215, 175, 95), + (215, 175, 135), + (215, 175, 175), + (215, 175, 215), + (215, 175, 255), + (215, 215, 0), + (215, 215, 95), + (215, 215, 135), + (215, 215, 175), + (215, 215, 215), + (215, 215, 255), + (215, 255, 0), + (215, 255, 95), + (215, 255, 135), + (215, 255, 175), + (215, 255, 215), + (215, 255, 255), + (255, 0, 0), + (255, 0, 95), + 
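+        # (Entries 16-231 of this list form the standard 6x6x6 colour cube,
+        # built from channel values 0, 95, 135, 175, 215 and 255; entries
+        # 232-255 below are the 24-step grey ramp.)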
(255, 0, 135), + (255, 0, 175), + (255, 0, 215), + (255, 0, 255), + (255, 95, 0), + (255, 95, 95), + (255, 95, 135), + (255, 95, 175), + (255, 95, 215), + (255, 95, 255), + (255, 135, 0), + (255, 135, 95), + (255, 135, 135), + (255, 135, 175), + (255, 135, 215), + (255, 135, 255), + (255, 175, 0), + (255, 175, 95), + (255, 175, 135), + (255, 175, 175), + (255, 175, 215), + (255, 175, 255), + (255, 215, 0), + (255, 215, 95), + (255, 215, 135), + (255, 215, 175), + (255, 215, 215), + (255, 215, 255), + (255, 255, 0), + (255, 255, 95), + (255, 255, 135), + (255, 255, 175), + (255, 255, 215), + (255, 255, 255), + (8, 8, 8), + (18, 18, 18), + (28, 28, 28), + (38, 38, 38), + (48, 48, 48), + (58, 58, 58), + (68, 68, 68), + (78, 78, 78), + (88, 88, 88), + (98, 98, 98), + (108, 108, 108), + (118, 118, 118), + (128, 128, 128), + (138, 138, 138), + (148, 148, 148), + (158, 158, 158), + (168, 168, 168), + (178, 178, 178), + (188, 188, 188), + (198, 198, 198), + (208, 208, 208), + (218, 218, 218), + (228, 228, 228), + (238, 238, 238), + ] +) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py new file mode 100644 index 0000000..4f6d8b2 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py @@ -0,0 +1,17 @@ +from typing import Optional + + +def pick_bool(*values: Optional[bool]) -> bool: + """Pick the first non-none bool or return the last value. + + Args: + *values (bool): Any number of boolean or None values. + + Returns: + bool: First non-none boolean. + """ + assert values, "1 or more values required" + for value in values: + if value is not None: + return value + return bool(value) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py new file mode 100644 index 0000000..e8a3a67 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py @@ -0,0 +1,160 @@ +import sys +from fractions import Fraction +from math import ceil +from typing import cast, List, Optional, Sequence + +if sys.version_info >= (3, 8): + from typing import Protocol +else: + from pip._vendor.typing_extensions import Protocol # pragma: no cover + + +class Edge(Protocol): + """Any object that defines an edge (such as Layout).""" + + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + +def ratio_resolve(total: int, edges: Sequence[Edge]) -> List[int]: + """Divide total space to satisfy size, ratio, and minimum_size, constraints. + + The returned list of integers should add up to total in most cases, unless it is + impossible to satisfy all the constraints. For instance, if there are two edges + with a minimum size of 20 each and `total` is 30 then the returned list will be + greater than total. In practice, this would mean that a Layout object would + clip the rows that would overflow the screen height. + + Args: + total (int): Total number of characters. + edges (List[Edge]): Edges within total space. + + Returns: + List[int]: Number of characters for each edge. 
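+
+    Illustrative example: three flexible edges with ratios 1, 2 and 1 and a
+    total of 100 resolve to [25, 50, 25].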
+ """ + # Size of edge or None for yet to be determined + sizes = [(edge.size or None) for edge in edges] + + _Fraction = Fraction + + # While any edges haven't been calculated + while None in sizes: + # Get flexible edges and index to map these back on to sizes list + flexible_edges = [ + (index, edge) + for index, (size, edge) in enumerate(zip(sizes, edges)) + if size is None + ] + # Remaining space in total + remaining = total - sum(size or 0 for size in sizes) + if remaining <= 0: + # No room for flexible edges + return [ + ((edge.minimum_size or 1) if size is None else size) + for size, edge in zip(sizes, edges) + ] + # Calculate number of characters in a ratio portion + portion = _Fraction( + remaining, sum((edge.ratio or 1) for _, edge in flexible_edges) + ) + + # If any edges will be less than their minimum, replace size with the minimum + for index, edge in flexible_edges: + if portion * edge.ratio <= edge.minimum_size: + sizes[index] = edge.minimum_size + # New fixed size will invalidate calculations, so we need to repeat the process + break + else: + # Distribute flexible space and compensate for rounding error + # Since edge sizes can only be integers we need to add the remainder + # to the following line + remainder = _Fraction(0) + for index, edge in flexible_edges: + size, remainder = divmod(portion * edge.ratio + remainder, 1) + sizes[index] = size + break + # Sizes now contains integers only + return cast(List[int], sizes) + + +def ratio_reduce( + total: int, ratios: List[int], maximums: List[int], values: List[int] +) -> List[int]: + """Divide an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + maximums (List[int]): List of maximums values for each slot. + values (List[int]): List of values + + Returns: + List[int]: A list of integers guaranteed to sum to total. + """ + ratios = [ratio if _max else 0 for ratio, _max in zip(ratios, maximums)] + total_ratio = sum(ratios) + if not total_ratio: + return values[:] + total_remaining = total + result: List[int] = [] + append = result.append + for ratio, maximum, value in zip(ratios, maximums, values): + if ratio and total_ratio > 0: + distributed = min(maximum, round(ratio * total_remaining / total_ratio)) + append(value - distributed) + total_remaining -= distributed + total_ratio -= ratio + else: + append(value) + return result + + +def ratio_distribute( + total: int, ratios: List[int], minimums: Optional[List[int]] = None +) -> List[int]: + """Distribute an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + minimums (List[int]): List of minimum values for each slot. + + Returns: + List[int]: A list of integers guaranteed to sum to total. 
+ """ + if minimums: + ratios = [ratio if _min else 0 for ratio, _min in zip(ratios, minimums)] + total_ratio = sum(ratios) + assert total_ratio > 0, "Sum of ratios must be > 0" + + total_remaining = total + distributed_total: List[int] = [] + append = distributed_total.append + if minimums is None: + _minimums = [0] * len(ratios) + else: + _minimums = minimums + for ratio, minimum in zip(ratios, _minimums): + if total_ratio > 0: + distributed = max(minimum, ceil(ratio * total_remaining / total_ratio)) + else: + distributed = total_remaining + append(distributed) + total_ratio -= ratio + total_remaining -= distributed + return distributed_total + + +if __name__ == "__main__": + from dataclasses import dataclass + + @dataclass + class E: + + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + resolved = ratio_resolve(110, [E(None, 1, 1), E(None, 1, 1), E(None, 1, 1)]) + print(sum(resolved)) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py new file mode 100644 index 0000000..d0bb1fe --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py @@ -0,0 +1,482 @@ +""" +Spinners are from: +* cli-spinners: + MIT License + Copyright (c) Sindre Sorhus (sindresorhus.com) + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE + FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. 
+""" + +SPINNERS = { + "dots": { + "interval": 80, + "frames": "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏", + }, + "dots2": {"interval": 80, "frames": "⣾⣽⣻⢿⡿⣟⣯⣷"}, + "dots3": { + "interval": 80, + "frames": "⠋⠙⠚⠞⠖⠦⠴⠲⠳⠓", + }, + "dots4": { + "interval": 80, + "frames": "⠄⠆⠇⠋⠙⠸⠰⠠⠰⠸⠙⠋⠇⠆", + }, + "dots5": { + "interval": 80, + "frames": "⠋⠙⠚⠒⠂⠂⠒⠲⠴⠦⠖⠒⠐⠐⠒⠓⠋", + }, + "dots6": { + "interval": 80, + "frames": "⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠴⠲⠒⠂⠂⠒⠚⠙⠉⠁", + }, + "dots7": { + "interval": 80, + "frames": "⠈⠉⠋⠓⠒⠐⠐⠒⠖⠦⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈", + }, + "dots8": { + "interval": 80, + "frames": "⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈", + }, + "dots9": {"interval": 80, "frames": "⢹⢺⢼⣸⣇⡧⡗⡏"}, + "dots10": {"interval": 80, "frames": "⢄⢂⢁⡁⡈⡐⡠"}, + "dots11": {"interval": 100, "frames": "⠁⠂⠄⡀⢀⠠⠐⠈"}, + "dots12": { + "interval": 80, + "frames": [ + "⢀⠀", + "⡀⠀", + "⠄⠀", + "⢂⠀", + "⡂⠀", + "⠅⠀", + "⢃⠀", + "⡃⠀", + "⠍⠀", + "⢋⠀", + "⡋⠀", + "⠍⠁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⢈⠩", + "⡀⢙", + "⠄⡙", + "⢂⠩", + "⡂⢘", + "⠅⡘", + "⢃⠨", + "⡃⢐", + "⠍⡐", + "⢋⠠", + "⡋⢀", + "⠍⡁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⠈⠩", + "⠀⢙", + "⠀⡙", + "⠀⠩", + "⠀⢘", + "⠀⡘", + "⠀⠨", + "⠀⢐", + "⠀⡐", + "⠀⠠", + "⠀⢀", + "⠀⡀", + ], + }, + "dots8Bit": { + "interval": 80, + "frames": "⠀⠁⠂⠃⠄⠅⠆⠇⡀⡁⡂⡃⡄⡅⡆⡇⠈⠉⠊⠋⠌⠍⠎⠏⡈⡉⡊⡋⡌⡍⡎⡏⠐⠑⠒⠓⠔⠕⠖⠗⡐⡑⡒⡓⡔⡕⡖⡗⠘⠙⠚⠛⠜⠝⠞⠟⡘⡙" + "⡚⡛⡜⡝⡞⡟⠠⠡⠢⠣⠤⠥⠦⠧⡠⡡⡢⡣⡤⡥⡦⡧⠨⠩⠪⠫⠬⠭⠮⠯⡨⡩⡪⡫⡬⡭⡮⡯⠰⠱⠲⠳⠴⠵⠶⠷⡰⡱⡲⡳⡴⡵⡶⡷⠸⠹⠺⠻" + "⠼⠽⠾⠿⡸⡹⡺⡻⡼⡽⡾⡿⢀⢁⢂⢃⢄⢅⢆⢇⣀⣁⣂⣃⣄⣅⣆⣇⢈⢉⢊⢋⢌⢍⢎⢏⣈⣉⣊⣋⣌⣍⣎⣏⢐⢑⢒⢓⢔⢕⢖⢗⣐⣑⣒⣓⣔⣕" + "⣖⣗⢘⢙⢚⢛⢜⢝⢞⢟⣘⣙⣚⣛⣜⣝⣞⣟⢠⢡⢢⢣⢤⢥⢦⢧⣠⣡⣢⣣⣤⣥⣦⣧⢨⢩⢪⢫⢬⢭⢮⢯⣨⣩⣪⣫⣬⣭⣮⣯⢰⢱⢲⢳⢴⢵⢶⢷" + "⣰⣱⣲⣳⣴⣵⣶⣷⢸⢹⢺⢻⢼⢽⢾⢿⣸⣹⣺⣻⣼⣽⣾⣿", + }, + "line": {"interval": 130, "frames": ["-", "\\", "|", "/"]}, + "line2": {"interval": 100, "frames": "⠂-–—–-"}, + "pipe": {"interval": 100, "frames": "┤┘┴└├┌┬┐"}, + "simpleDots": {"interval": 400, "frames": [". ", ".. ", "...", " "]}, + "simpleDotsScrolling": { + "interval": 200, + "frames": [". ", ".. 
", "...", " ..", " .", " "], + }, + "star": {"interval": 70, "frames": "✶✸✹✺✹✷"}, + "star2": {"interval": 80, "frames": "+x*"}, + "flip": { + "interval": 70, + "frames": "___-``'´-___", + }, + "hamburger": {"interval": 100, "frames": "☱☲☴"}, + "growVertical": { + "interval": 120, + "frames": "▁▃▄▅▆▇▆▅▄▃", + }, + "growHorizontal": { + "interval": 120, + "frames": "▏▎▍▌▋▊▉▊▋▌▍▎", + }, + "balloon": {"interval": 140, "frames": " .oO@* "}, + "balloon2": {"interval": 120, "frames": ".oO°Oo."}, + "noise": {"interval": 100, "frames": "▓▒░"}, + "bounce": {"interval": 120, "frames": "⠁⠂⠄⠂"}, + "boxBounce": {"interval": 120, "frames": "▖▘▝▗"}, + "boxBounce2": {"interval": 100, "frames": "▌▀▐▄"}, + "triangle": {"interval": 50, "frames": "◢◣◤◥"}, + "arc": {"interval": 100, "frames": "◜◠◝◞◡◟"}, + "circle": {"interval": 120, "frames": "◡⊙◠"}, + "squareCorners": {"interval": 180, "frames": "◰◳◲◱"}, + "circleQuarters": {"interval": 120, "frames": "◴◷◶◵"}, + "circleHalves": {"interval": 50, "frames": "◐◓◑◒"}, + "squish": {"interval": 100, "frames": "╫╪"}, + "toggle": {"interval": 250, "frames": "⊶⊷"}, + "toggle2": {"interval": 80, "frames": "▫▪"}, + "toggle3": {"interval": 120, "frames": "□■"}, + "toggle4": {"interval": 100, "frames": "■□▪▫"}, + "toggle5": {"interval": 100, "frames": "▮▯"}, + "toggle6": {"interval": 300, "frames": "ဝ၀"}, + "toggle7": {"interval": 80, "frames": "⦾⦿"}, + "toggle8": {"interval": 100, "frames": "◍◌"}, + "toggle9": {"interval": 100, "frames": "◉◎"}, + "toggle10": {"interval": 100, "frames": "㊂㊀㊁"}, + "toggle11": {"interval": 50, "frames": "⧇⧆"}, + "toggle12": {"interval": 120, "frames": "☗☖"}, + "toggle13": {"interval": 80, "frames": "=*-"}, + "arrow": {"interval": 100, "frames": "←↖↑↗→↘↓↙"}, + "arrow2": { + "interval": 80, + "frames": ["⬆️ ", "↗️ ", "➡️ ", "↘️ ", "⬇️ ", "↙️ ", "⬅️ ", "↖️ "], + }, + "arrow3": { + "interval": 120, + "frames": ["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸"], + }, + "bouncingBar": { + "interval": 80, + "frames": [ + "[ ]", + "[= ]", + "[== ]", + "[=== ]", + "[ ===]", + "[ ==]", + "[ =]", + "[ ]", + "[ =]", + "[ ==]", + "[ ===]", + "[====]", + "[=== ]", + "[== ]", + "[= ]", + ], + }, + "bouncingBall": { + "interval": 80, + "frames": [ + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "( ●)", + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "(● )", + ], + }, + "smiley": {"interval": 200, "frames": ["😄 ", "😝 "]}, + "monkey": {"interval": 300, "frames": ["🙈 ", "🙈 ", "🙉 ", "🙊 "]}, + "hearts": {"interval": 100, "frames": ["💛 ", "💙 ", "💜 ", "💚 ", "❤️ "]}, + "clock": { + "interval": 100, + "frames": [ + "🕛 ", + "🕐 ", + "🕑 ", + "🕒 ", + "🕓 ", + "🕔 ", + "🕕 ", + "🕖 ", + "🕗 ", + "🕘 ", + "🕙 ", + "🕚 ", + ], + }, + "earth": {"interval": 180, "frames": ["🌍 ", "🌎 ", "🌏 "]}, + "material": { + "interval": 17, + "frames": [ + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███████▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "██████████▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "█████████████▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁██████████████▁▁▁▁", + "▁▁▁██████████████▁▁▁", + "▁▁▁▁█████████████▁▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁▁█████████████", + 
"▁▁▁▁▁▁▁█████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁▁▁████████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁██████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁▁█████████████▁▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁▁███████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁▁█████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + ], + }, + "moon": { + "interval": 80, + "frames": ["🌑 ", "🌒 ", "🌓 ", "🌔 ", "🌕 ", "🌖 ", "🌗 ", "🌘 "], + }, + "runner": {"interval": 140, "frames": ["🚶 ", "🏃 "]}, + "pong": { + "interval": 80, + "frames": [ + "▐⠂ ▌", + "▐⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂▌", + "▐ ⠠▌", + "▐ ⡀▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐⠠ ▌", + ], + }, + "shark": { + "interval": 120, + "frames": [ + "▐|\\____________▌", + "▐_|\\___________▌", + "▐__|\\__________▌", + "▐___|\\_________▌", + "▐____|\\________▌", + "▐_____|\\_______▌", + "▐______|\\______▌", + "▐_______|\\_____▌", + "▐________|\\____▌", + "▐_________|\\___▌", + "▐__________|\\__▌", + "▐___________|\\_▌", + "▐____________|\\▌", + "▐____________/|▌", + "▐___________/|_▌", + "▐__________/|__▌", + "▐_________/|___▌", + "▐________/|____▌", + "▐_______/|_____▌", + "▐______/|______▌", + "▐_____/|_______▌", + "▐____/|________▌", + "▐___/|_________▌", + "▐__/|__________▌", + "▐_/|___________▌", + "▐/|____________▌", + ], + }, + "dqpb": {"interval": 100, "frames": "dqpb"}, + "weather": { + "interval": 100, + "frames": [ + "☀️ ", + "☀️ ", + "☀️ ", + "🌤 ", + "⛅️ ", + "🌥 ", + "☁️ ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "⛈ ", + "🌨 ", + "🌧 ", + "🌨 ", + "☁️ ", + "🌥 ", + "⛅️ ", + "🌤 ", + "☀️ ", + "☀️ ", + ], + }, + "christmas": {"interval": 400, "frames": "🌲🎄"}, + "grenade": { + "interval": 80, + "frames": [ + "، ", + "′ ", + " ´ ", + " ‾ ", + " ⸌", + " ⸊", + " |", + " ⁎", + " ⁕", + " ෴ ", + " ⁓", + " ", + " ", + " ", + ], + }, + "point": {"interval": 125, "frames": ["∙∙∙", "●∙∙", "∙●∙", "∙∙●", "∙∙∙"]}, + "layer": {"interval": 150, "frames": "-=≡"}, + "betaWave": { + "interval": 80, + "frames": [ + "ρββββββ", + "βρβββββ", + "ββρββββ", + 
"βββρβββ", + "ββββρββ", + "βββββρβ", + "ββββββρ", + ], + }, + "aesthetic": { + "interval": 80, + "frames": [ + "▰▱▱▱▱▱▱", + "▰▰▱▱▱▱▱", + "▰▰▰▱▱▱▱", + "▰▰▰▰▱▱▱", + "▰▰▰▰▰▱▱", + "▰▰▰▰▰▰▱", + "▰▰▰▰▰▰▰", + "▰▱▱▱▱▱▱", + ], + }, +} diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py new file mode 100644 index 0000000..194564e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py @@ -0,0 +1,16 @@ +from typing import List, TypeVar + +T = TypeVar("T") + + +class Stack(List[T]): + """A small shim over builtin list.""" + + @property + def top(self) -> T: + """Get top of stack.""" + return self[-1] + + def push(self, item: T) -> None: + """Push an item on to the stack (append in stack nomenclature).""" + self.append(item) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py new file mode 100644 index 0000000..a2ca6be --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py @@ -0,0 +1,19 @@ +""" +Timer context manager, only used in debug. + +""" + +from time import time + +import contextlib +from typing import Generator + + +@contextlib.contextmanager +def timer(subject: str = "time") -> Generator[None, None, None]: + """print the elapsed time. (only used in debugging)""" + start = time() + yield + elapsed = time() - start + elapsed_ms = elapsed * 1000 + print(f"{subject} elapsed {elapsed_ms:.1f}ms") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py new file mode 100644 index 0000000..81b1082 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py @@ -0,0 +1,662 @@ +"""Light wrapper around the Win32 Console API - this module should only be imported on Windows + +The API that this module wraps is documented at https://docs.microsoft.com/en-us/windows/console/console-functions +""" +import ctypes +import sys +from typing import Any + +windll: Any = None +if sys.platform == "win32": + windll = ctypes.LibraryLoader(ctypes.WinDLL) +else: + raise ImportError(f"{__name__} can only be imported on Windows") + +import time +from ctypes import Structure, byref, wintypes +from typing import IO, NamedTuple, Type, cast + +from pip._vendor.rich.color import ColorSystem +from pip._vendor.rich.style import Style + +STDOUT = -11 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 + +COORD = wintypes._COORD + + +class LegacyWindowsError(Exception): + pass + + +class WindowsCoordinates(NamedTuple): + """Coordinates in the Windows Console API are (y, x), not (x, y). + This class is intended to prevent that confusion. + Rows and columns are indexed from 0. + This class can be used in place of wintypes._COORD in arguments and argtypes. + """ + + row: int + col: int + + @classmethod + def from_param(cls, value: "WindowsCoordinates") -> COORD: + """Converts a WindowsCoordinates into a wintypes _COORD structure. + This classmethod is internally called by ctypes to perform the conversion. + + Args: + value (WindowsCoordinates): The input coordinates to convert. + + Returns: + wintypes._COORD: The converted coordinates struct. 
+ """ + return COORD(value.col, value.row) + + +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + + +class CONSOLE_CURSOR_INFO(ctypes.Structure): + _fields_ = [("dwSize", wintypes.DWORD), ("bVisible", wintypes.BOOL)] + + +_GetStdHandle = windll.kernel32.GetStdHandle +_GetStdHandle.argtypes = [ + wintypes.DWORD, +] +_GetStdHandle.restype = wintypes.HANDLE + + +def GetStdHandle(handle: int = STDOUT) -> wintypes.HANDLE: + """Retrieves a handle to the specified standard device (standard input, standard output, or standard error). + + Args: + handle (int): Integer identifier for the handle. Defaults to -11 (stdout). + + Returns: + wintypes.HANDLE: The handle + """ + return cast(wintypes.HANDLE, _GetStdHandle(handle)) + + +_GetConsoleMode = windll.kernel32.GetConsoleMode +_GetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.LPDWORD] +_GetConsoleMode.restype = wintypes.BOOL + + +def GetConsoleMode(std_handle: wintypes.HANDLE) -> int: + """Retrieves the current input mode of a console's input buffer + or the current output mode of a console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + + Raises: + LegacyWindowsError: If any error occurs while calling the Windows console API. + + Returns: + int: Value representing the current console mode as documented at + https://docs.microsoft.com/en-us/windows/console/getconsolemode#parameters + """ + + console_mode = wintypes.DWORD() + success = bool(_GetConsoleMode(std_handle, console_mode)) + if not success: + raise LegacyWindowsError("Unable to get legacy Windows Console Mode") + return console_mode.value + + +_FillConsoleOutputCharacterW = windll.kernel32.FillConsoleOutputCharacterW +_FillConsoleOutputCharacterW.argtypes = [ + wintypes.HANDLE, + ctypes.c_char, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputCharacterW.restype = wintypes.BOOL + + +def FillConsoleOutputCharacter( + std_handle: wintypes.HANDLE, + char: str, + length: int, + start: WindowsCoordinates, +) -> int: + """Writes a character to the console screen buffer a specified number of times, beginning at the specified coordinates. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + char (str): The character to write. Must be a string of length 1. + length (int): The number of times to write the character. + start (WindowsCoordinates): The coordinates to start writing at. + + Returns: + int: The number of characters written. 
+ """ + character = ctypes.c_char(char.encode()) + num_characters = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + _FillConsoleOutputCharacterW( + std_handle, + character, + num_characters, + start, + byref(num_written), + ) + return num_written.value + + +_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute +_FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputAttribute.restype = wintypes.BOOL + + +def FillConsoleOutputAttribute( + std_handle: wintypes.HANDLE, + attributes: int, + length: int, + start: WindowsCoordinates, +) -> int: + """Sets the character attributes for a specified number of character cells, + beginning at the specified coordinates in a screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours of the cells. + length (int): The number of cells to set the output attribute of. + start (WindowsCoordinates): The coordinates of the first cell whose attributes are to be set. + + Returns: + int: The number of cells whose attributes were actually set. + """ + num_cells = wintypes.DWORD(length) + style_attrs = wintypes.WORD(attributes) + num_written = wintypes.DWORD(0) + _FillConsoleOutputAttribute( + std_handle, style_attrs, num_cells, start, byref(num_written) + ) + return num_written.value + + +_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute +_SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, +] +_SetConsoleTextAttribute.restype = wintypes.BOOL + + +def SetConsoleTextAttribute( + std_handle: wintypes.HANDLE, attributes: wintypes.WORD +) -> bool: + """Set the colour attributes for all text written after this function is called. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours. + + + Returns: + bool: True if the attribute was set successfully, otherwise False. + """ + return bool(_SetConsoleTextAttribute(std_handle, attributes)) + + +_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo +_GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), +] +_GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + +def GetConsoleScreenBufferInfo( + std_handle: wintypes.HANDLE, +) -> CONSOLE_SCREEN_BUFFER_INFO: + """Retrieves information about the specified console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. 
+ + Returns: + CONSOLE_SCREEN_BUFFER_INFO: A CONSOLE_SCREEN_BUFFER_INFO ctype struct contain information about + screen size, cursor position, colour attributes, and more.""" + console_screen_buffer_info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(std_handle, byref(console_screen_buffer_info)) + return console_screen_buffer_info + + +_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition +_SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + cast(Type[COORD], WindowsCoordinates), +] +_SetConsoleCursorPosition.restype = wintypes.BOOL + + +def SetConsoleCursorPosition( + std_handle: wintypes.HANDLE, coords: WindowsCoordinates +) -> bool: + """Set the position of the cursor in the console screen + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + coords (WindowsCoordinates): The coordinates to move the cursor to. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorPosition(std_handle, coords)) + + +_GetConsoleCursorInfo = windll.kernel32.GetConsoleCursorInfo +_GetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_GetConsoleCursorInfo.restype = wintypes.BOOL + + +def GetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Get the cursor info - used to get cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct that receives information + about the console's cursor. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_GetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleCursorInfo = windll.kernel32.SetConsoleCursorInfo +_SetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_SetConsoleCursorInfo.restype = wintypes.BOOL + + +def SetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Set the cursor info - used for adjusting cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct containing the new cursor info. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleTitle = windll.kernel32.SetConsoleTitleW +_SetConsoleTitle.argtypes = [wintypes.LPCWSTR] +_SetConsoleTitle.restype = wintypes.BOOL + + +def SetConsoleTitle(title: str) -> bool: + """Sets the title of the current console window + + Args: + title (str): The new title of the console window. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleTitle(title)) + + +class LegacyWindowsTerm: + """This class allows interaction with the legacy Windows Console API. It should only be used in the context + of environments where virtual terminal processing is not available. However, if it is used in a Windows environment, + the entire API should work. + + Args: + file (IO[str]): The file which the Windows Console API HANDLE is retrieved from, defaults to sys.stdout. 
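+
+    Illustrative usage (mirroring the demo at the bottom of this module):
+
+        term = LegacyWindowsTerm(sys.stdout)
+        term.write_styled("hello", Style.parse("black on green"))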
+ """ + + BRIGHT_BIT = 8 + + # Indices are ANSI color numbers, values are the corresponding Windows Console API color numbers + ANSI_TO_WINDOWS = [ + 0, # black The Windows colours are defined in wincon.h as follows: + 4, # red define FOREGROUND_BLUE 0x0001 -- 0000 0001 + 2, # green define FOREGROUND_GREEN 0x0002 -- 0000 0010 + 6, # yellow define FOREGROUND_RED 0x0004 -- 0000 0100 + 1, # blue define FOREGROUND_INTENSITY 0x0008 -- 0000 1000 + 5, # magenta define BACKGROUND_BLUE 0x0010 -- 0001 0000 + 3, # cyan define BACKGROUND_GREEN 0x0020 -- 0010 0000 + 7, # white define BACKGROUND_RED 0x0040 -- 0100 0000 + 8, # bright black (grey) define BACKGROUND_INTENSITY 0x0080 -- 1000 0000 + 12, # bright red + 10, # bright green + 14, # bright yellow + 9, # bright blue + 13, # bright magenta + 11, # bright cyan + 15, # bright white + ] + + def __init__(self, file: "IO[str]") -> None: + handle = GetStdHandle(STDOUT) + self._handle = handle + default_text = GetConsoleScreenBufferInfo(handle).wAttributes + self._default_text = default_text + + self._default_fore = default_text & 7 + self._default_back = (default_text >> 4) & 7 + self._default_attrs = self._default_fore | (self._default_back << 4) + + self._file = file + self.write = file.write + self.flush = file.flush + + @property + def cursor_position(self) -> WindowsCoordinates: + """Returns the current position of the cursor (0-based) + + Returns: + WindowsCoordinates: The current cursor position. + """ + coord: COORD = GetConsoleScreenBufferInfo(self._handle).dwCursorPosition + return WindowsCoordinates(row=cast(int, coord.Y), col=cast(int, coord.X)) + + @property + def screen_size(self) -> WindowsCoordinates: + """Returns the current size of the console screen buffer, in character columns and rows + + Returns: + WindowsCoordinates: The width and height of the screen as WindowsCoordinates. + """ + screen_size: COORD = GetConsoleScreenBufferInfo(self._handle).dwSize + return WindowsCoordinates( + row=cast(int, screen_size.Y), col=cast(int, screen_size.X) + ) + + def write_text(self, text: str) -> None: + """Write text directly to the terminal without any modification of styles + + Args: + text (str): The text to write to the console + """ + self.write(text) + self.flush() + + def write_styled(self, text: str, style: Style) -> None: + """Write styled text to the terminal. + + Args: + text (str): The text to write + style (Style): The style of the text + """ + color = style.color + bgcolor = style.bgcolor + if style.reverse: + color, bgcolor = bgcolor, color + + if color: + fore = color.downgrade(ColorSystem.WINDOWS).number + fore = fore if fore is not None else 7 # Default to ANSI 7: White + if style.bold: + fore = fore | self.BRIGHT_BIT + if style.dim: + fore = fore & ~self.BRIGHT_BIT + fore = self.ANSI_TO_WINDOWS[fore] + else: + fore = self._default_fore + + if bgcolor: + back = bgcolor.downgrade(ColorSystem.WINDOWS).number + back = back if back is not None else 0 # Default to ANSI 0: Black + back = self.ANSI_TO_WINDOWS[back] + else: + back = self._default_back + + assert fore is not None + assert back is not None + + SetConsoleTextAttribute( + self._handle, attributes=ctypes.c_ushort(fore | (back << 4)) + ) + self.write_text(text) + SetConsoleTextAttribute(self._handle, attributes=self._default_text) + + def move_cursor_to(self, new_position: WindowsCoordinates) -> None: + """Set the position of the cursor + + Args: + new_position (WindowsCoordinates): The WindowsCoordinates representing the new position of the cursor. 
+ """ + if new_position.col < 0 or new_position.row < 0: + return + SetConsoleCursorPosition(self._handle, coords=new_position) + + def erase_line(self) -> None: + """Erase all content on the line the cursor is currently located at""" + screen_size = self.screen_size + cursor_position = self.cursor_position + cells_to_erase = screen_size.col + start_coordinates = WindowsCoordinates(row=cursor_position.row, col=0) + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=start_coordinates + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=start_coordinates, + ) + + def erase_end_of_line(self) -> None: + """Erase all content from the cursor position to the end of that line""" + cursor_position = self.cursor_position + cells_to_erase = self.screen_size.col - cursor_position.col + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=cursor_position + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=cursor_position, + ) + + def erase_start_of_line(self) -> None: + """Erase all content from the cursor position to the start of that line""" + row, col = self.cursor_position + start = WindowsCoordinates(row, 0) + FillConsoleOutputCharacter(self._handle, " ", length=col, start=start) + FillConsoleOutputAttribute( + self._handle, self._default_attrs, length=col, start=start + ) + + def move_cursor_up(self) -> None: + """Move the cursor up a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row - 1, col=cursor_position.col + ), + ) + + def move_cursor_down(self) -> None: + """Move the cursor down a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row + 1, + col=cursor_position.col, + ), + ) + + def move_cursor_forward(self) -> None: + """Move the cursor forward a single cell. Wrap to the next line if required.""" + row, col = self.cursor_position + if col == self.screen_size.col - 1: + row += 1 + col = 0 + else: + col += 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def move_cursor_to_column(self, column: int) -> None: + """Move cursor to the column specified by the zero-based column index, staying on the same row + + Args: + column (int): The zero-based column index to move the cursor to. + """ + row, _ = self.cursor_position + SetConsoleCursorPosition(self._handle, coords=WindowsCoordinates(row, column)) + + def move_cursor_backward(self) -> None: + """Move the cursor backward a single cell. 
Wrap to the previous line if required.""" + row, col = self.cursor_position + if col == 0: + row -= 1 + col = self.screen_size.col - 1 + else: + col -= 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def hide_cursor(self) -> None: + """Hide the cursor""" + current_cursor_size = self._get_cursor_size() + invisible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=0) + SetConsoleCursorInfo(self._handle, cursor_info=invisible_cursor) + + def show_cursor(self) -> None: + """Show the cursor""" + current_cursor_size = self._get_cursor_size() + visible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=1) + SetConsoleCursorInfo(self._handle, cursor_info=visible_cursor) + + def set_title(self, title: str) -> None: + """Set the title of the terminal window + + Args: + title (str): The new title of the console window + """ + assert len(title) < 255, "Console title must be less than 255 characters" + SetConsoleTitle(title) + + def _get_cursor_size(self) -> int: + """Get the percentage of the character cell that is filled by the cursor""" + cursor_info = CONSOLE_CURSOR_INFO() + GetConsoleCursorInfo(self._handle, cursor_info=cursor_info) + return int(cursor_info.dwSize) + + +if __name__ == "__main__": + handle = GetStdHandle() + + from pip._vendor.rich.console import Console + + console = Console() + + term = LegacyWindowsTerm(sys.stdout) + term.set_title("Win32 Console Examples") + + style = Style(color="black", bgcolor="red") + + heading = Style.parse("black on green") + + # Check colour output + console.rule("Checking colour output") + console.print("[on red]on red!") + console.print("[blue]blue!") + console.print("[yellow]yellow!") + console.print("[bold yellow]bold yellow!") + console.print("[bright_yellow]bright_yellow!") + console.print("[dim bright_yellow]dim bright_yellow!") + console.print("[italic cyan]italic cyan!") + console.print("[bold white on blue]bold white on blue!") + console.print("[reverse bold white on blue]reverse bold white on blue!") + console.print("[bold black on cyan]bold black on cyan!") + console.print("[black on green]black on green!") + console.print("[blue on green]blue on green!") + console.print("[white on black]white on black!") + console.print("[black on white]black on white!") + console.print("[#1BB152 on #DA812D]#1BB152 on #DA812D!") + + # Check cursor movement + console.rule("Checking cursor movement") + console.print() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("went back and wrapped to prev line") + time.sleep(1) + term.move_cursor_up() + term.write_text("we go up") + time.sleep(1) + term.move_cursor_down() + term.write_text("and down") + time.sleep(1) + term.move_cursor_up() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went up and back 2") + time.sleep(1) + term.move_cursor_down() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went down and back 2") + time.sleep(1) + + # Check erasing of lines + term.hide_cursor() + console.print() + console.rule("Checking line erasing") + console.print("\n...Deleting to the start of the line...") + term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + term.move_cursor_to_column(16) + term.write_styled("<", Style.parse("black on red")) + term.move_cursor_backward() + time.sleep(1) + term.erase_start_of_line() + time.sleep(1) + + console.print("\n\n...And to the end of the line...") + 
term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + + term.move_cursor_to_column(16) + term.write_styled(">", Style.parse("black on red")) + time.sleep(1) + term.erase_end_of_line() + time.sleep(1) + + console.print("\n\n...Now the whole line will be erased...") + term.write_styled("I'm going to disappear!", style=Style.parse("black on cyan")) + time.sleep(1) + term.erase_line() + + term.show_cursor() + print("\n") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py new file mode 100644 index 0000000..10fc0d7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py @@ -0,0 +1,72 @@ +import sys +from dataclasses import dataclass + + +@dataclass +class WindowsConsoleFeatures: + """Windows features available.""" + + vt: bool = False + """The console supports VT codes.""" + truecolor: bool = False + """The console supports truecolor.""" + + +try: + import ctypes + from ctypes import LibraryLoader + + if sys.platform == "win32": + windll = LibraryLoader(ctypes.WinDLL) + else: + windll = None + raise ImportError("Not windows") + + from pip._vendor.rich._win32_console import ( + ENABLE_VIRTUAL_TERMINAL_PROCESSING, + GetConsoleMode, + GetStdHandle, + LegacyWindowsError, + ) + +except (AttributeError, ImportError, ValueError): + + # Fallback if we can't load the Windows DLL + def get_windows_console_features() -> WindowsConsoleFeatures: + features = WindowsConsoleFeatures() + return features + +else: + + def get_windows_console_features() -> WindowsConsoleFeatures: + """Get windows console features. + + Returns: + WindowsConsoleFeatures: An instance of WindowsConsoleFeatures. + """ + handle = GetStdHandle() + try: + console_mode = GetConsoleMode(handle) + success = True + except LegacyWindowsError: + console_mode = 0 + success = False + vt = bool(success and console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) + truecolor = False + if vt: + win_version = sys.getwindowsversion() + truecolor = win_version.major > 10 or ( + win_version.major == 10 and win_version.build >= 15063 + ) + features = WindowsConsoleFeatures(vt=vt, truecolor=truecolor) + return features + + +if __name__ == "__main__": + import platform + + features = get_windows_console_features() + from pip._vendor.rich import print + + print(f'platform="{platform.system()}"') + print(repr(features)) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py new file mode 100644 index 0000000..5ece056 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py @@ -0,0 +1,56 @@ +from typing import Iterable, Sequence, Tuple, cast + +from pip._vendor.rich._win32_console import LegacyWindowsTerm, WindowsCoordinates +from pip._vendor.rich.segment import ControlCode, ControlType, Segment + + +def legacy_windows_render(buffer: Iterable[Segment], term: LegacyWindowsTerm) -> None: + """Makes appropriate Windows Console API calls based on the segments in the buffer. + + Args: + buffer (Iterable[Segment]): Iterable of Segments to convert to Win32 API calls. + term (LegacyWindowsTerm): Used to call the Windows Console API. 
+ """ + for text, style, control in buffer: + if not control: + if style: + term.write_styled(text, style) + else: + term.write_text(text) + else: + control_codes: Sequence[ControlCode] = control + for control_code in control_codes: + control_type = control_code[0] + if control_type == ControlType.CURSOR_MOVE_TO: + _, x, y = cast(Tuple[ControlType, int, int], control_code) + term.move_cursor_to(WindowsCoordinates(row=y - 1, col=x - 1)) + elif control_type == ControlType.CARRIAGE_RETURN: + term.write_text("\r") + elif control_type == ControlType.HOME: + term.move_cursor_to(WindowsCoordinates(0, 0)) + elif control_type == ControlType.CURSOR_UP: + term.move_cursor_up() + elif control_type == ControlType.CURSOR_DOWN: + term.move_cursor_down() + elif control_type == ControlType.CURSOR_FORWARD: + term.move_cursor_forward() + elif control_type == ControlType.CURSOR_BACKWARD: + term.move_cursor_backward() + elif control_type == ControlType.CURSOR_MOVE_TO_COLUMN: + _, column = cast(Tuple[ControlType, int], control_code) + term.move_cursor_to_column(column - 1) + elif control_type == ControlType.HIDE_CURSOR: + term.hide_cursor() + elif control_type == ControlType.SHOW_CURSOR: + term.show_cursor() + elif control_type == ControlType.ERASE_IN_LINE: + _, mode = cast(Tuple[ControlType, int], control_code) + if mode == 0: + term.erase_end_of_line() + elif mode == 1: + term.erase_start_of_line() + elif mode == 2: + term.erase_line() + elif control_type == ControlType.SET_WINDOW_TITLE: + _, title = cast(Tuple[ControlType, str], control_code) + term.set_title(title) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py new file mode 100644 index 0000000..c45f193 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py @@ -0,0 +1,56 @@ +import re +from typing import Iterable, List, Tuple + +from ._loop import loop_last +from .cells import cell_len, chop_cells + +re_word = re.compile(r"\s*\S+\s*") + + +def words(text: str) -> Iterable[Tuple[int, int, str]]: + position = 0 + word_match = re_word.match(text, position) + while word_match is not None: + start, end = word_match.span() + word = word_match.group(0) + yield start, end, word + word_match = re_word.match(text, end) + + +def divide_line(text: str, width: int, fold: bool = True) -> List[int]: + divides: List[int] = [] + append = divides.append + line_position = 0 + _cell_len = cell_len + for start, _end, word in words(text): + word_length = _cell_len(word.rstrip()) + if line_position + word_length > width: + if word_length > width: + if fold: + chopped_words = chop_cells(word, max_size=width, position=0) + for last, line in loop_last(chopped_words): + if start: + append(start) + + if last: + line_position = _cell_len(line) + else: + start += len(line) + else: + if start: + append(start) + line_position = _cell_len(word) + elif line_position and start: + append(start) + line_position = _cell_len(word) + else: + line_position += _cell_len(word) + return divides + + +if __name__ == "__main__": # pragma: no cover + from .console import Console + + console = Console(width=10) + console.print("12345 abcdefghijklmnopqrstuvwyxzABCDEFGHIJKLMNOPQRSTUVWXYZ 12345") + print(chop_cells("abcdefghijklmnopqrstuvwxyz", 10, position=2)) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py new file mode 100644 index 0000000..e6e498e --- /dev/null +++ 
b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py
@@ -0,0 +1,33 @@
+from abc import ABC
+
+
+class RichRenderable(ABC):
+    """An abstract base class for Rich renderables.
+
+    Note that there is no need to extend this class; the intended use is to check if an
+    object supports the Rich renderable protocol. For example::
+
+        if isinstance(my_object, RichRenderable):
+            console.print(my_object)
+
+    """
+
+    @classmethod
+    def __subclasshook__(cls, other: type) -> bool:
+        """Check if this class supports the rich render protocol."""
+        return hasattr(other, "__rich_console__") or hasattr(other, "__rich__")
+
+
+if __name__ == "__main__":  # pragma: no cover
+    from pip._vendor.rich.text import Text
+
+    t = Text()
+    print(isinstance(Text, RichRenderable))
+    print(isinstance(t, RichRenderable))
+
+    class Foo:
+        pass
+
+    f = Foo()
+    print(isinstance(f, RichRenderable))
+    print(isinstance("", RichRenderable))
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/align.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/align.py
new file mode 100644
index 0000000..c310b66
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/align.py
@@ -0,0 +1,311 @@
+import sys
+from itertools import chain
+from typing import TYPE_CHECKING, Iterable, Optional
+
+if sys.version_info >= (3, 8):
+    from typing import Literal
+else:
+    from pip._vendor.typing_extensions import Literal  # pragma: no cover
+
+from .constrain import Constrain
+from .jupyter import JupyterMixin
+from .measure import Measurement
+from .segment import Segment
+from .style import StyleType
+
+if TYPE_CHECKING:
+    from .console import Console, ConsoleOptions, RenderableType, RenderResult
+
+AlignMethod = Literal["left", "center", "right"]
+VerticalAlignMethod = Literal["top", "middle", "bottom"]
+
+
+class Align(JupyterMixin):
+    """Align a renderable by adding spaces if necessary.
+
+    Args:
+        renderable (RenderableType): A console renderable.
+        align (AlignMethod): One of "left", "center", or "right".
+        style (StyleType, optional): An optional style to apply to the background.
+        vertical (Optional[VerticalAlignMethod], optional): Optional vertical align, one of "top", "middle", or "bottom". Defaults to None.
+        pad (bool, optional): Pad the right with spaces. Defaults to True.
+        width (int, optional): Restrict contents to given width, or None to use default width. Defaults to None.
+        height (int, optional): Set height of align renderable, or None to fit to contents. Defaults to None.
+
+    Raises:
+        ValueError: if ``align`` is not one of the expected values.
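+
+    Illustrative usage: Align.center("hello") pads "hello" on both sides to
+    fill the available width; the left/center/right classmethods below are
+    shorthand for the corresponding ``align`` values.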
+ """ + + def __init__( + self, + renderable: "RenderableType", + align: AlignMethod = "left", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: + if align not in ("left", "center", "right"): + raise ValueError( + f'invalid value for align, expected "left", "center", or "right" (not {align!r})' + ) + if vertical is not None and vertical not in ("top", "middle", "bottom"): + raise ValueError( + f'invalid value for vertical, expected "top", "middle", or "bottom" (not {vertical!r})' + ) + self.renderable = renderable + self.align = align + self.style = style + self.vertical = vertical + self.pad = pad + self.width = width + self.height = height + + def __repr__(self) -> str: + return f"Align({self.renderable!r}, {self.align!r})" + + @classmethod + def left( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the left.""" + return cls( + renderable, + "left", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def center( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the center.""" + return cls( + renderable, + "center", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def right( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the right.""" + return cls( + renderable, + "right", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + align = self.align + width = console.measure(self.renderable, options=options).maximum + rendered = console.render( + Constrain( + self.renderable, width if self.width is None else min(width, self.width) + ), + options.update(height=None), + ) + lines = list(Segment.split_lines(rendered)) + width, height = Segment.get_shape(lines) + lines = Segment.set_shape(lines, width, height) + new_line = Segment.line() + excess_space = options.max_width - width + style = console.get_style(self.style) if self.style is not None else None + + def generate_segments() -> Iterable[Segment]: + if excess_space <= 0: + # Exact fit + for line in lines: + yield from line + yield new_line + + elif align == "left": + # Pad on the right + pad = Segment(" " * excess_space, style) if self.pad else None + for line in lines: + yield from line + if pad: + yield pad + yield new_line + + elif align == "center": + # Pad left and right + left = excess_space // 2 + pad = Segment(" " * left, style) + pad_right = ( + Segment(" " * (excess_space - left), style) if self.pad else None + ) + for line in lines: + if left: + yield pad + yield from line + if pad_right: + yield pad_right + yield new_line + + elif align == "right": + # Padding on left + pad = Segment(" " * excess_space, style) + for line in lines: + yield pad + 
yield from line + yield new_line + + blank_line = ( + Segment(f"{' ' * (self.width or options.max_width)}\n", style) + if self.pad + else Segment("\n") + ) + + def blank_lines(count: int) -> Iterable[Segment]: + if count > 0: + for _ in range(count): + yield blank_line + + vertical_height = self.height or options.height + iter_segments: Iterable[Segment] + if self.vertical and vertical_height is not None: + if self.vertical == "top": + bottom_space = vertical_height - height + iter_segments = chain(generate_segments(), blank_lines(bottom_space)) + elif self.vertical == "middle": + top_space = (vertical_height - height) // 2 + bottom_space = vertical_height - top_space - height + iter_segments = chain( + blank_lines(top_space), + generate_segments(), + blank_lines(bottom_space), + ) + else: # self.vertical == "bottom": + top_space = vertical_height - height + iter_segments = chain(blank_lines(top_space), generate_segments()) + else: + iter_segments = generate_segments() + if self.style: + style = console.get_style(self.style) + iter_segments = Segment.apply_style(iter_segments, style) + yield from iter_segments + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +class VerticalCenter(JupyterMixin): + """Vertically aligns a renderable. + + Warn: + This class is deprecated and may be removed in a future version. Use Align class with + `vertical="middle"`. + + Args: + renderable (RenderableType): A renderable object. + """ + + def __init__( + self, + renderable: "RenderableType", + style: Optional[StyleType] = None, + ) -> None: + self.renderable = renderable + self.style = style + + def __repr__(self) -> str: + return f"VerticalCenter({self.renderable!r})" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + style = console.get_style(self.style) if self.style is not None else None + lines = console.render_lines( + self.renderable, options.update(height=None), pad=False + ) + width, _height = Segment.get_shape(lines) + new_line = Segment.line() + height = options.height or options.size.height + top_space = (height - len(lines)) // 2 + bottom_space = height - top_space - len(lines) + blank_line = Segment(f"{' ' * width}", style) + + def blank_lines(count: int) -> Iterable[Segment]: + for _ in range(count): + yield blank_line + yield new_line + + if top_space > 0: + yield from blank_lines(top_space) + for line in lines: + yield from line + yield new_line + if bottom_space > 0: + yield from blank_lines(bottom_space) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console, Group + from pip._vendor.rich.highlighter import ReprHighlighter + from pip._vendor.rich.panel import Panel + + highlighter = ReprHighlighter() + console = Console() + + panel = Panel( + Group( + Align.left(highlighter("align='left'")), + Align.center(highlighter("align='center'")), + Align.right(highlighter("align='right'")), + ), + width=60, + style="on dark_blue", + title="Align", + ) + + console.print( + Align.center(panel, vertical="middle", style="on red", height=console.height) + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py 
b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py new file mode 100644 index 0000000..66365e6 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py @@ -0,0 +1,240 @@ +import re +import sys +from contextlib import suppress +from typing import Iterable, NamedTuple, Optional + +from .color import Color +from .style import Style +from .text import Text + +re_ansi = re.compile( + r""" +(?:\x1b\](.*?)\x1b\\)| +(?:\x1b([(@-Z\\-_]|\[[0-?]*[ -/]*[@-~])) +""", + re.VERBOSE, +) + + +class _AnsiToken(NamedTuple): + """Result of ansi tokenized string.""" + + plain: str = "" + sgr: Optional[str] = "" + osc: Optional[str] = "" + + +def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: + """Tokenize a string in to plain text and ANSI codes. + + Args: + ansi_text (str): A String containing ANSI codes. + + Yields: + AnsiToken: A named tuple of (plain, sgr, osc) + """ + + position = 0 + sgr: Optional[str] + osc: Optional[str] + for match in re_ansi.finditer(ansi_text): + start, end = match.span(0) + osc, sgr = match.groups() + if start > position: + yield _AnsiToken(ansi_text[position:start]) + if sgr: + if sgr == "(": + position = end + 1 + continue + if sgr.endswith("m"): + yield _AnsiToken("", sgr[1:-1], osc) + else: + yield _AnsiToken("", sgr, osc) + position = end + if position < len(ansi_text): + yield _AnsiToken(ansi_text[position:]) + + +SGR_STYLE_MAP = { + 1: "bold", + 2: "dim", + 3: "italic", + 4: "underline", + 5: "blink", + 6: "blink2", + 7: "reverse", + 8: "conceal", + 9: "strike", + 21: "underline2", + 22: "not dim not bold", + 23: "not italic", + 24: "not underline", + 25: "not blink", + 26: "not blink2", + 27: "not reverse", + 28: "not conceal", + 29: "not strike", + 30: "color(0)", + 31: "color(1)", + 32: "color(2)", + 33: "color(3)", + 34: "color(4)", + 35: "color(5)", + 36: "color(6)", + 37: "color(7)", + 39: "default", + 40: "on color(0)", + 41: "on color(1)", + 42: "on color(2)", + 43: "on color(3)", + 44: "on color(4)", + 45: "on color(5)", + 46: "on color(6)", + 47: "on color(7)", + 49: "on default", + 51: "frame", + 52: "encircle", + 53: "overline", + 54: "not frame not encircle", + 55: "not overline", + 90: "color(8)", + 91: "color(9)", + 92: "color(10)", + 93: "color(11)", + 94: "color(12)", + 95: "color(13)", + 96: "color(14)", + 97: "color(15)", + 100: "on color(8)", + 101: "on color(9)", + 102: "on color(10)", + 103: "on color(11)", + 104: "on color(12)", + 105: "on color(13)", + 106: "on color(14)", + 107: "on color(15)", +} + + +class AnsiDecoder: + """Translate ANSI code in to styled Text.""" + + def __init__(self) -> None: + self.style = Style.null() + + def decode(self, terminal_text: str) -> Iterable[Text]: + """Decode ANSI codes in a string of terminal output, line by line. + + Args: + terminal_text (str): A string of terminal output containing ANSI codes. + + Yields: + Text: Marked up Text. + """ + for line in terminal_text.splitlines(): + yield self.decode_line(line) + + def decode_line(self, line: str) -> Text: + """Decode a line containing ansi codes. + + Args: + line (str): A line of terminal output. + + Returns: + Text: A Text instance marked up according to ansi codes.
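As a rough usage sketch of the decoder defined here: `decode_line` feeds each token from `_ansi_tokenize` through `SGR_STYLE_MAP`, accumulating a `Style` that it applies to the plain-text runs. Assuming a console and a line of captured terminal output:

```python
from pip._vendor.rich.ansi import AnsiDecoder
from pip._vendor.rich.console import Console

console = Console()
decoder = AnsiDecoder()

# SGR codes 1 (bold) and 31 (red foreground), then 0 (reset).
line = "plain \x1b[1;31mbold red\x1b[0m plain again"
text = decoder.decode_line(line)

print(text.plain)    # "plain bold red plain again"
print(text.spans)    # spans carrying the recovered bold-red style
console.print(text)  # re-renders with the decoded styles
```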
+ """ + from_ansi = Color.from_ansi + from_rgb = Color.from_rgb + _Style = Style + text = Text() + append = text.append + line = line.rsplit("\r", 1)[-1] + for plain_text, sgr, osc in _ansi_tokenize(line): + if plain_text: + append(plain_text, self.style or None) + elif osc is not None: + if osc.startswith("8;"): + _params, semicolon, link = osc[2:].partition(";") + if semicolon: + self.style = self.style.update_link(link or None) + elif sgr is not None: + # Translate in to semi-colon separated codes + # Ignore invalid codes, because we want to be lenient + codes = [ + min(255, int(_code) if _code else 0) + for _code in sgr.split(";") + if _code.isdigit() or _code == "" + ] + iter_codes = iter(codes) + for code in iter_codes: + if code == 0: + # reset + self.style = _Style.null() + elif code in SGR_STYLE_MAP: + # styles + self.style += _Style.parse(SGR_STYLE_MAP[code]) + elif code == 38: + #  Foreground + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ) + ) + elif code == 48: + # Background + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + None, from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + None, + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ), + ) + + return text + + +if sys.platform != "win32" and __name__ == "__main__": # pragma: no cover + import io + import os + import pty + import sys + + decoder = AnsiDecoder() + + stdout = io.BytesIO() + + def read(fd: int) -> bytes: + data = os.read(fd, 1024) + stdout.write(data) + return data + + pty.spawn(sys.argv[1:], read) + + from .console import Console + + console = Console(record=True) + + stdout_result = stdout.getvalue().decode("utf-8") + print(stdout_result) + + for line in decoder.decode(stdout_result): + console.print(line) + + console.save_html("stdout.html") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py new file mode 100644 index 0000000..ed86a55 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py @@ -0,0 +1,94 @@ +from typing import Optional, Union + +from .color import Color +from .console import Console, ConsoleOptions, RenderResult +from .jupyter import JupyterMixin +from .measure import Measurement +from .segment import Segment +from .style import Style + +# There are left-aligned characters for 1/8 to 7/8, but +# the right-aligned characters exist only for 1/8 and 4/8. +BEGIN_BLOCK_ELEMENTS = ["█", "█", "█", "▐", "▐", "▐", "▕", "▕"] +END_BLOCK_ELEMENTS = [" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉"] +FULL_BLOCK = "█" + + +class Bar(JupyterMixin): + """Renders a solid block bar. + + Args: + size (float): Value for the end of the bar. + begin (float): Begin point (between 0 and size, inclusive). + end (float): End point (between 0 and size, inclusive). + width (int, optional): Width of the bar, or ``None`` for maximum width. Defaults to None. + color (Union[Color, str], optional): Color of the bar. Defaults to "default". + bgcolor (Union[Color, str], optional): Color of bar background. Defaults to "default". 
+ """ + + def __init__( + self, + size: float, + begin: float, + end: float, + *, + width: Optional[int] = None, + color: Union[Color, str] = "default", + bgcolor: Union[Color, str] = "default", + ): + self.size = size + self.begin = max(begin, 0) + self.end = min(end, size) + self.width = width + self.style = Style(color=color, bgcolor=bgcolor) + + def __repr__(self) -> str: + return f"Bar({self.size}, {self.begin}, {self.end})" + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + + width = min( + self.width if self.width is not None else options.max_width, + options.max_width, + ) + + if self.begin >= self.end: + yield Segment(" " * width, self.style) + yield Segment.line() + return + + prefix_complete_eights = int(width * 8 * self.begin / self.size) + prefix_bar_count = prefix_complete_eights // 8 + prefix_eights_count = prefix_complete_eights % 8 + + body_complete_eights = int(width * 8 * self.end / self.size) + body_bar_count = body_complete_eights // 8 + body_eights_count = body_complete_eights % 8 + + # When start and end fall into the same cell, we ideally should render + # a symbol that's "center-aligned", but there is no good symbol in Unicode. + # In this case, we fall back to right-aligned block symbol for simplicity. + + prefix = " " * prefix_bar_count + if prefix_eights_count: + prefix += BEGIN_BLOCK_ELEMENTS[prefix_eights_count] + + body = FULL_BLOCK * body_bar_count + if body_eights_count: + body += END_BLOCK_ELEMENTS[body_eights_count] + + suffix = " " * (width - len(body)) + + yield Segment(prefix + body[len(prefix) :] + suffix, self.style) + yield Segment.line() + + def __rich_measure__( + self, console: Console, options: ConsoleOptions + ) -> Measurement: + return ( + Measurement(self.width, self.width) + if self.width is not None + else Measurement(4, options.max_width) + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/box.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/box.py new file mode 100644 index 0000000..97d2a94 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/box.py @@ -0,0 +1,517 @@ +import sys +from typing import TYPE_CHECKING, Iterable, List + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pip._vendor.typing_extensions import Literal # pragma: no cover + + +from ._loop import loop_last + +if TYPE_CHECKING: + from pip._vendor.rich.console import ConsoleOptions + + +class Box: + """Defines characters to render boxes. + + ┌─┬┐ top + │ ││ head + ├─┼┤ head_row + │ ││ mid + ├─┼┤ row + ├─┼┤ foot_row + │ ││ foot + └─┴┘ bottom + + Args: + box (str): Characters making up box. + ascii (bool, optional): True if this box uses ascii characters only. Default is False. 
+ """ + + def __init__(self, box: str, *, ascii: bool = False) -> None: + self._box = box + self.ascii = ascii + line1, line2, line3, line4, line5, line6, line7, line8 = box.splitlines() + # top + self.top_left, self.top, self.top_divider, self.top_right = iter(line1) + # head + self.head_left, _, self.head_vertical, self.head_right = iter(line2) + # head_row + ( + self.head_row_left, + self.head_row_horizontal, + self.head_row_cross, + self.head_row_right, + ) = iter(line3) + + # mid + self.mid_left, _, self.mid_vertical, self.mid_right = iter(line4) + # row + self.row_left, self.row_horizontal, self.row_cross, self.row_right = iter(line5) + # foot_row + ( + self.foot_row_left, + self.foot_row_horizontal, + self.foot_row_cross, + self.foot_row_right, + ) = iter(line6) + # foot + self.foot_left, _, self.foot_vertical, self.foot_right = iter(line7) + # bottom + self.bottom_left, self.bottom, self.bottom_divider, self.bottom_right = iter( + line8 + ) + + def __repr__(self) -> str: + return "Box(...)" + + def __str__(self) -> str: + return self._box + + def substitute(self, options: "ConsoleOptions", safe: bool = True) -> "Box": + """Substitute this box for another if it won't render due to platform issues. + + Args: + options (ConsoleOptions): Console options used in rendering. + safe (bool, optional): Substitute this for another Box if there are known problems + displaying on the platform (currently only relevant on Windows). Default is True. + + Returns: + Box: A different Box or the same Box. + """ + box = self + if options.legacy_windows and safe: + box = LEGACY_WINDOWS_SUBSTITUTIONS.get(box, box) + if options.ascii_only and not box.ascii: + box = ASCII + return box + + def get_plain_headed_box(self) -> "Box": + """If this box uses special characters for the borders of the header, then + return the equivalent box that does not. + + Returns: + Box: The most similar Box that doesn't use header-specific box characters. + If the current Box already satisfies this criterion, then it's returned. + """ + return PLAIN_HEADED_SUBSTITUTIONS.get(self, self) + + def get_top(self, widths: Iterable[int]) -> str: + """Get the top of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.top_left) + for last, width in loop_last(widths): + append(self.top * width) + if not last: + append(self.top_divider) + append(self.top_right) + return "".join(parts) + + def get_row( + self, + widths: Iterable[int], + level: Literal["head", "row", "foot", "mid"] = "row", + edge: bool = True, + ) -> str: + """Get the top of a simple box. + + Args: + width (List[int]): Widths of columns. + + Returns: + str: A string of box characters. 
+ """ + if level == "head": + left = self.head_row_left + horizontal = self.head_row_horizontal + cross = self.head_row_cross + right = self.head_row_right + elif level == "row": + left = self.row_left + horizontal = self.row_horizontal + cross = self.row_cross + right = self.row_right + elif level == "mid": + left = self.mid_left + horizontal = " " + cross = self.mid_vertical + right = self.mid_right + elif level == "foot": + left = self.foot_row_left + horizontal = self.foot_row_horizontal + cross = self.foot_row_cross + right = self.foot_row_right + else: + raise ValueError("level must be 'head', 'row' or 'foot'") + + parts: List[str] = [] + append = parts.append + if edge: + append(left) + for last, width in loop_last(widths): + append(horizontal * width) + if not last: + append(cross) + if edge: + append(right) + return "".join(parts) + + def get_bottom(self, widths: Iterable[int]) -> str: + """Get the bottom of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.bottom_left) + for last, width in loop_last(widths): + append(self.bottom * width) + if not last: + append(self.bottom_divider) + append(self.bottom_right) + return "".join(parts) + + +ASCII: Box = Box( + """\ ++--+ +| || +|-+| +| || +|-+| +|-+| +| || ++--+ +""", + ascii=True, +) + +ASCII2: Box = Box( + """\ ++-++ +| || ++-++ +| || ++-++ ++-++ +| || ++-++ +""", + ascii=True, +) + +ASCII_DOUBLE_HEAD: Box = Box( + """\ ++-++ +| || ++=++ +| || ++-++ ++-++ +| || ++-++ +""", + ascii=True, +) + +SQUARE: Box = Box( + """\ +┌─┬┐ +│ ││ +├─┼┤ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +SQUARE_DOUBLE_HEAD: Box = Box( + """\ +┌─┬┐ +│ ││ +╞═╪╡ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +MINIMAL: Box = Box( + """\ + ╷ + │ +╶─┼╴ + │ +╶─┼╴ +╶─┼╴ + │ + ╵ +""" +) + + +MINIMAL_HEAVY_HEAD: Box = Box( + """\ + ╷ + │ +╺━┿╸ + │ +╶─┼╴ +╶─┼╴ + │ + ╵ +""" +) + +MINIMAL_DOUBLE_HEAD: Box = Box( + """\ + ╷ + │ + ═╪ + │ + ─┼ + ─┼ + │ + ╵ +""" +) + + +SIMPLE: Box = Box( + """\ + + + ── + + + ── + + +""" +) + +SIMPLE_HEAD: Box = Box( + """\ + + + ── + + + + + +""" +) + + +SIMPLE_HEAVY: Box = Box( + """\ + + + ━━ + + + ━━ + + +""" +) + + +HORIZONTALS: Box = Box( + """\ + ── + + ── + + ── + ── + + ── +""" +) + +ROUNDED: Box = Box( + """\ +╭─┬╮ +│ ││ +├─┼┤ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +╰─┴╯ +""" +) + +HEAVY: Box = Box( + """\ +┏━┳┓ +┃ ┃┃ +┣━╋┫ +┃ ┃┃ +┣━╋┫ +┣━╋┫ +┃ ┃┃ +┗━┻┛ +""" +) + +HEAVY_EDGE: Box = Box( + """\ +┏━┯┓ +┃ │┃ +┠─┼┨ +┃ │┃ +┠─┼┨ +┠─┼┨ +┃ │┃ +┗━┷┛ +""" +) + +HEAVY_HEAD: Box = Box( + """\ +┏━┳┓ +┃ ┃┃ +┡━╇┩ +│ ││ +├─┼┤ +├─┼┤ +│ ││ +└─┴┘ +""" +) + +DOUBLE: Box = Box( + """\ +╔═╦╗ +║ ║║ +╠═╬╣ +║ ║║ +╠═╬╣ +╠═╬╣ +║ ║║ +╚═╩╝ +""" +) + +DOUBLE_EDGE: Box = Box( + """\ +╔═╤╗ +║ │║ +╟─┼╢ +║ │║ +╟─┼╢ +╟─┼╢ +║ │║ +╚═╧╝ +""" +) + +MARKDOWN: Box = Box( + """\ + +| || +|-|| +| || +|-|| +|-|| +| || + +""", + ascii=True, +) + +# Map Boxes that don't render with raster fonts on to equivalent that do +LEGACY_WINDOWS_SUBSTITUTIONS = { + ROUNDED: SQUARE, + MINIMAL_HEAVY_HEAD: MINIMAL, + SIMPLE_HEAVY: SIMPLE, + HEAVY: SQUARE, + HEAVY_EDGE: SQUARE, + HEAVY_HEAD: SQUARE, +} + +# Map headed boxes to their headerless equivalents +PLAIN_HEADED_SUBSTITUTIONS = { + HEAVY_HEAD: SQUARE, + SQUARE_DOUBLE_HEAD: SQUARE, + MINIMAL_DOUBLE_HEAD: MINIMAL, + MINIMAL_HEAVY_HEAD: MINIMAL, + ASCII_DOUBLE_HEAD: ASCII2, +} + + +if __name__ == "__main__": # pragma: no cover + + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.panel import Panel + + from . 
import box as box + from .console import Console + from .table import Table + from .text import Text + + console = Console(record=True) + + BOXES = [ + "ASCII", + "ASCII2", + "ASCII_DOUBLE_HEAD", + "SQUARE", + "SQUARE_DOUBLE_HEAD", + "MINIMAL", + "MINIMAL_HEAVY_HEAD", + "MINIMAL_DOUBLE_HEAD", + "SIMPLE", + "SIMPLE_HEAD", + "SIMPLE_HEAVY", + "HORIZONTALS", + "ROUNDED", + "HEAVY", + "HEAVY_EDGE", + "HEAVY_HEAD", + "DOUBLE", + "DOUBLE_EDGE", + "MARKDOWN", + ] + + console.print(Panel("[bold green]Box Constants", style="green"), justify="center") + console.print() + + columns = Columns(expand=True, padding=2) + for box_name in sorted(BOXES): + table = Table( + show_footer=True, style="dim", border_style="not dim", expand=True + ) + table.add_column("Header 1", "Footer 1") + table.add_column("Header 2", "Footer 2") + table.add_row("Cell", "Cell") + table.add_row("Cell", "Cell") + table.box = getattr(box, box_name) + table.title = Text(f"box.{box_name}", style="magenta") + columns.add_renderable(table) + console.print(columns) + + # console.save_svg("box.svg") diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py new file mode 100644 index 0000000..9354f9e --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py @@ -0,0 +1,154 @@ +import re +from functools import lru_cache +from typing import Callable, List + +from ._cell_widths import CELL_WIDTHS + +# Regex to match sequence of the most common character ranges +_is_single_cell_widths = re.compile("^[\u0020-\u006f\u00a0\u02ff\u0370-\u0482]*$").match + + +@lru_cache(4096) +def cached_cell_len(text: str) -> int: + """Get the number of cells required to display text. + + This method always caches, which may use up a lot of memory. It is recommended to use + `cell_len` over this method. + + Args: + text (str): Text to display. + + Returns: + int: Get the number of cells required to display text. + """ + _get_size = get_character_cell_size + total_size = sum(_get_size(character) for character in text) + return total_size + + +def cell_len(text: str, _cell_len: Callable[[str], int] = cached_cell_len) -> int: + """Get the number of cells required to display text. + + Args: + text (str): Text to display. + + Returns: + int: Get the number of cells required to display text. + """ + if len(text) < 512: + return _cell_len(text) + _get_size = get_character_cell_size + total_size = sum(_get_size(character) for character in text) + return total_size + + +@lru_cache(maxsize=4096) +def get_character_cell_size(character: str) -> int: + """Get the cell size of a character. + + Args: + character (str): A single character. + + Returns: + int: Number of cells (0, 1 or 2) occupied by that character. + """ + return _get_codepoint_cell_size(ord(character)) + + +@lru_cache(maxsize=4096) +def _get_codepoint_cell_size(codepoint: int) -> int: + """Get the cell size of a character. + + Args: + codepoint (int): Codepoint of a character. + + Returns: + int: Number of cells (0, 1 or 2) occupied by that character. 
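The binary search below resolves a codepoint against the `CELL_WIDTHS` table; in practice the public entry points are `cell_len` and `set_cell_size`. A sketch of the observable behaviour, assuming a terminal that renders CJK characters two cells wide:

```python
from pip._vendor.rich.cells import cell_len, get_character_cell_size, set_cell_size

print(get_character_cell_size("a"))   # 1
print(get_character_cell_size("好"))  # 2 (wide CJK character)

print(len("你好ab"))       # 4 code points
print(cell_len("你好ab"))  # 6 terminal cells

# Pad or truncate to an exact cell count; a wide character that would be
# cut in half is replaced by a space.
print(set_cell_size("ab", 5))      # "ab   "
print(set_cell_size("你好ab", 5))  # "你好a"
```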
+ """ + + _table = CELL_WIDTHS + lower_bound = 0 + upper_bound = len(_table) - 1 + index = (lower_bound + upper_bound) // 2 + while True: + start, end, width = _table[index] + if codepoint < start: + upper_bound = index - 1 + elif codepoint > end: + lower_bound = index + 1 + else: + return 0 if width == -1 else width + if upper_bound < lower_bound: + break + index = (lower_bound + upper_bound) // 2 + return 1 + + +def set_cell_size(text: str, total: int) -> str: + """Set the length of a string to fit within given number of cells.""" + + if _is_single_cell_widths(text): + size = len(text) + if size < total: + return text + " " * (total - size) + return text[:total] + + if total <= 0: + return "" + cell_size = cell_len(text) + if cell_size == total: + return text + if cell_size < total: + return text + " " * (total - cell_size) + + start = 0 + end = len(text) + + # Binary search until we find the right size + while True: + pos = (start + end) // 2 + before = text[: pos + 1] + before_len = cell_len(before) + if before_len == total + 1 and cell_len(before[-1]) == 2: + return before[:-1] + " " + if before_len == total: + return before + if before_len > total: + end = pos + else: + start = pos + + +# TODO: This is inefficient +# TODO: This might not work with CWJ type characters +def chop_cells(text: str, max_size: int, position: int = 0) -> List[str]: + """Break text in to equal (cell) length strings, returning the characters in reverse + order""" + _get_character_cell_size = get_character_cell_size + characters = [ + (character, _get_character_cell_size(character)) for character in text + ] + total_size = position + lines: List[List[str]] = [[]] + append = lines[-1].append + + for character, size in reversed(characters): + if total_size + size > max_size: + lines.append([character]) + append = lines[-1].append + total_size = size + else: + total_size += size + append(character) + + return ["".join(line) for line in lines] + + +if __name__ == "__main__": # pragma: no cover + + print(get_character_cell_size("😽")) + for line in chop_cells("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", 8): + print(line) + for n in range(80, 1, -1): + print(set_cell_size("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", n) + "|") + print("x" * n) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color.py new file mode 100644 index 0000000..dfe4559 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color.py @@ -0,0 +1,622 @@ +import platform +import re +from colorsys import rgb_to_hls +from enum import IntEnum +from functools import lru_cache +from typing import TYPE_CHECKING, NamedTuple, Optional, Tuple + +from ._palettes import EIGHT_BIT_PALETTE, STANDARD_PALETTE, WINDOWS_PALETTE +from .color_triplet import ColorTriplet +from .repr import Result, rich_repr +from .terminal_theme import DEFAULT_TERMINAL_THEME + +if TYPE_CHECKING: # pragma: no cover + from .terminal_theme import TerminalTheme + from .text import Text + + +WINDOWS = platform.system() == "Windows" + + +class ColorSystem(IntEnum): + """One of the 3 color system supported by terminals.""" + + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return f"ColorSystem.{self.name}" + + def __str__(self) -> str: + return repr(self) + + +class ColorType(IntEnum): + """Type of color stored in Color class.""" + + DEFAULT = 0 + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return 
f"ColorType.{self.name}" + + +ANSI_COLOR_NAMES = { + "black": 0, + "red": 1, + "green": 2, + "yellow": 3, + "blue": 4, + "magenta": 5, + "cyan": 6, + "white": 7, + "bright_black": 8, + "bright_red": 9, + "bright_green": 10, + "bright_yellow": 11, + "bright_blue": 12, + "bright_magenta": 13, + "bright_cyan": 14, + "bright_white": 15, + "grey0": 16, + "gray0": 16, + "navy_blue": 17, + "dark_blue": 18, + "blue3": 20, + "blue1": 21, + "dark_green": 22, + "deep_sky_blue4": 25, + "dodger_blue3": 26, + "dodger_blue2": 27, + "green4": 28, + "spring_green4": 29, + "turquoise4": 30, + "deep_sky_blue3": 32, + "dodger_blue1": 33, + "green3": 40, + "spring_green3": 41, + "dark_cyan": 36, + "light_sea_green": 37, + "deep_sky_blue2": 38, + "deep_sky_blue1": 39, + "spring_green2": 47, + "cyan3": 43, + "dark_turquoise": 44, + "turquoise2": 45, + "green1": 46, + "spring_green1": 48, + "medium_spring_green": 49, + "cyan2": 50, + "cyan1": 51, + "dark_red": 88, + "deep_pink4": 125, + "purple4": 55, + "purple3": 56, + "blue_violet": 57, + "orange4": 94, + "grey37": 59, + "gray37": 59, + "medium_purple4": 60, + "slate_blue3": 62, + "royal_blue1": 63, + "chartreuse4": 64, + "dark_sea_green4": 71, + "pale_turquoise4": 66, + "steel_blue": 67, + "steel_blue3": 68, + "cornflower_blue": 69, + "chartreuse3": 76, + "cadet_blue": 73, + "sky_blue3": 74, + "steel_blue1": 81, + "pale_green3": 114, + "sea_green3": 78, + "aquamarine3": 79, + "medium_turquoise": 80, + "chartreuse2": 112, + "sea_green2": 83, + "sea_green1": 85, + "aquamarine1": 122, + "dark_slate_gray2": 87, + "dark_magenta": 91, + "dark_violet": 128, + "purple": 129, + "light_pink4": 95, + "plum4": 96, + "medium_purple3": 98, + "slate_blue1": 99, + "yellow4": 106, + "wheat4": 101, + "grey53": 102, + "gray53": 102, + "light_slate_grey": 103, + "light_slate_gray": 103, + "medium_purple": 104, + "light_slate_blue": 105, + "dark_olive_green3": 149, + "dark_sea_green": 108, + "light_sky_blue3": 110, + "sky_blue2": 111, + "dark_sea_green3": 150, + "dark_slate_gray3": 116, + "sky_blue1": 117, + "chartreuse1": 118, + "light_green": 120, + "pale_green1": 156, + "dark_slate_gray1": 123, + "red3": 160, + "medium_violet_red": 126, + "magenta3": 164, + "dark_orange3": 166, + "indian_red": 167, + "hot_pink3": 168, + "medium_orchid3": 133, + "medium_orchid": 134, + "medium_purple2": 140, + "dark_goldenrod": 136, + "light_salmon3": 173, + "rosy_brown": 138, + "grey63": 139, + "gray63": 139, + "medium_purple1": 141, + "gold3": 178, + "dark_khaki": 143, + "navajo_white3": 144, + "grey69": 145, + "gray69": 145, + "light_steel_blue3": 146, + "light_steel_blue": 147, + "yellow3": 184, + "dark_sea_green2": 157, + "light_cyan3": 152, + "light_sky_blue1": 153, + "green_yellow": 154, + "dark_olive_green2": 155, + "dark_sea_green1": 193, + "pale_turquoise1": 159, + "deep_pink3": 162, + "magenta2": 200, + "hot_pink2": 169, + "orchid": 170, + "medium_orchid1": 207, + "orange3": 172, + "light_pink3": 174, + "pink3": 175, + "plum3": 176, + "violet": 177, + "light_goldenrod3": 179, + "tan": 180, + "misty_rose3": 181, + "thistle3": 182, + "plum2": 183, + "khaki3": 185, + "light_goldenrod2": 222, + "light_yellow3": 187, + "grey84": 188, + "gray84": 188, + "light_steel_blue1": 189, + "yellow2": 190, + "dark_olive_green1": 192, + "honeydew2": 194, + "light_cyan1": 195, + "red1": 196, + "deep_pink2": 197, + "deep_pink1": 199, + "magenta1": 201, + "orange_red1": 202, + "indian_red1": 204, + "hot_pink": 206, + "dark_orange": 208, + "salmon1": 209, + "light_coral": 210, + "pale_violet_red1": 211, + 
"orchid2": 212, + "orchid1": 213, + "orange1": 214, + "sandy_brown": 215, + "light_salmon1": 216, + "light_pink1": 217, + "pink1": 218, + "plum1": 219, + "gold1": 220, + "navajo_white1": 223, + "misty_rose1": 224, + "thistle1": 225, + "yellow1": 226, + "light_goldenrod1": 227, + "khaki1": 228, + "wheat1": 229, + "cornsilk1": 230, + "grey100": 231, + "gray100": 231, + "grey3": 232, + "gray3": 232, + "grey7": 233, + "gray7": 233, + "grey11": 234, + "gray11": 234, + "grey15": 235, + "gray15": 235, + "grey19": 236, + "gray19": 236, + "grey23": 237, + "gray23": 237, + "grey27": 238, + "gray27": 238, + "grey30": 239, + "gray30": 239, + "grey35": 240, + "gray35": 240, + "grey39": 241, + "gray39": 241, + "grey42": 242, + "gray42": 242, + "grey46": 243, + "gray46": 243, + "grey50": 244, + "gray50": 244, + "grey54": 245, + "gray54": 245, + "grey58": 246, + "gray58": 246, + "grey62": 247, + "gray62": 247, + "grey66": 248, + "gray66": 248, + "grey70": 249, + "gray70": 249, + "grey74": 250, + "gray74": 250, + "grey78": 251, + "gray78": 251, + "grey82": 252, + "gray82": 252, + "grey85": 253, + "gray85": 253, + "grey89": 254, + "gray89": 254, + "grey93": 255, + "gray93": 255, +} + + +class ColorParseError(Exception): + """The color could not be parsed.""" + + +RE_COLOR = re.compile( + r"""^ +\#([0-9a-f]{6})$| +color\(([0-9]{1,3})\)$| +rgb\(([\d\s,]+)\)$ +""", + re.VERBOSE, +) + + +@rich_repr +class Color(NamedTuple): + """Terminal color definition.""" + + name: str + """The name of the color (typically the input to Color.parse).""" + type: ColorType + """The type of the color.""" + number: Optional[int] = None + """The color number, if a standard color, or None.""" + triplet: Optional[ColorTriplet] = None + """A triplet of color components, if an RGB color.""" + + def __rich__(self) -> "Text": + """Displays the actual color if Rich printed.""" + from .style import Style + from .text import Text + + return Text.assemble( + f"", + ) + + def __rich_repr__(self) -> Result: + yield self.name + yield self.type + yield "number", self.number, None + yield "triplet", self.triplet, None + + @property + def system(self) -> ColorSystem: + """Get the native color system for this color.""" + if self.type == ColorType.DEFAULT: + return ColorSystem.STANDARD + return ColorSystem(int(self.type)) + + @property + def is_system_defined(self) -> bool: + """Check if the color is ultimately defined by the system.""" + return self.system not in (ColorSystem.EIGHT_BIT, ColorSystem.TRUECOLOR) + + @property + def is_default(self) -> bool: + """Check if the color is a default color.""" + return self.type == ColorType.DEFAULT + + def get_truecolor( + self, theme: Optional["TerminalTheme"] = None, foreground: bool = True + ) -> ColorTriplet: + """Get an equivalent color triplet for this color. + + Args: + theme (TerminalTheme, optional): Optional terminal theme, or None to use default. Defaults to None. + foreground (bool, optional): True for a foreground color, or False for background. Defaults to True. + + Returns: + ColorTriplet: A color triplet containing RGB components. 
+ """ + + if theme is None: + theme = DEFAULT_TERMINAL_THEME + if self.type == ColorType.TRUECOLOR: + assert self.triplet is not None + return self.triplet + elif self.type == ColorType.EIGHT_BIT: + assert self.number is not None + return EIGHT_BIT_PALETTE[self.number] + elif self.type == ColorType.STANDARD: + assert self.number is not None + return theme.ansi_colors[self.number] + elif self.type == ColorType.WINDOWS: + assert self.number is not None + return WINDOWS_PALETTE[self.number] + else: # self.type == ColorType.DEFAULT: + assert self.number is None + return theme.foreground_color if foreground else theme.background_color + + @classmethod + def from_ansi(cls, number: int) -> "Color": + """Create a Color number from it's 8-bit ansi number. + + Args: + number (int): A number between 0-255 inclusive. + + Returns: + Color: A new Color instance. + """ + return cls( + name=f"color({number})", + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + @classmethod + def from_triplet(cls, triplet: "ColorTriplet") -> "Color": + """Create a truecolor RGB color from a triplet of values. + + Args: + triplet (ColorTriplet): A color triplet containing red, green and blue components. + + Returns: + Color: A new color object. + """ + return cls(name=triplet.hex, type=ColorType.TRUECOLOR, triplet=triplet) + + @classmethod + def from_rgb(cls, red: float, green: float, blue: float) -> "Color": + """Create a truecolor from three color components in the range(0->255). + + Args: + red (float): Red component in range 0-255. + green (float): Green component in range 0-255. + blue (float): Blue component in range 0-255. + + Returns: + Color: A new color object. + """ + return cls.from_triplet(ColorTriplet(int(red), int(green), int(blue))) + + @classmethod + def default(cls) -> "Color": + """Get a Color instance representing the default color. + + Returns: + Color: Default color. 
+ """ + return cls(name="default", type=ColorType.DEFAULT) + + @classmethod + @lru_cache(maxsize=1024) + def parse(cls, color: str) -> "Color": + """Parse a color definition.""" + original_color = color + color = color.lower().strip() + + if color == "default": + return cls(color, type=ColorType.DEFAULT) + + color_number = ANSI_COLOR_NAMES.get(color) + if color_number is not None: + return cls( + color, + type=(ColorType.STANDARD if color_number < 16 else ColorType.EIGHT_BIT), + number=color_number, + ) + + color_match = RE_COLOR.match(color) + if color_match is None: + raise ColorParseError(f"{original_color!r} is not a valid color") + + color_24, color_8, color_rgb = color_match.groups() + if color_24: + triplet = ColorTriplet( + int(color_24[0:2], 16), int(color_24[2:4], 16), int(color_24[4:6], 16) + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + elif color_8: + number = int(color_8) + if number > 255: + raise ColorParseError(f"color number must be <= 255 in {color!r}") + return cls( + color, + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + else: # color_rgb: + components = color_rgb.split(",") + if len(components) != 3: + raise ColorParseError( + f"expected three components in {original_color!r}" + ) + red, green, blue = components + triplet = ColorTriplet(int(red), int(green), int(blue)) + if not all(component <= 255 for component in triplet): + raise ColorParseError( + f"color components must be <= 255 in {original_color!r}" + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + @lru_cache(maxsize=1024) + def get_ansi_codes(self, foreground: bool = True) -> Tuple[str, ...]: + """Get the ANSI escape codes for this color.""" + _type = self.type + if _type == ColorType.DEFAULT: + return ("39" if foreground else "49",) + + elif _type == ColorType.WINDOWS: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.STANDARD: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.EIGHT_BIT: + assert self.number is not None + return ("38" if foreground else "48", "5", str(self.number)) + + else: # self.standard == ColorStandard.TRUECOLOR: + assert self.triplet is not None + red, green, blue = self.triplet + return ("38" if foreground else "48", "2", str(red), str(green), str(blue)) + + @lru_cache(maxsize=1024) + def downgrade(self, system: ColorSystem) -> "Color": + """Downgrade a color system to a system with fewer colors.""" + + if self.type in (ColorType.DEFAULT, system): + return self + # Convert to 8-bit color from truecolor color + if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + _h, l, s = rgb_to_hls(*self.triplet.normalized) + # If saturation is under 15% assume it is grayscale + if s < 0.15: + gray = round(l * 25.0) + if gray == 0: + color_number = 16 + elif gray == 25: + color_number = 231 + else: + color_number = 231 + gray + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + red, green, blue = self.triplet + six_red = red / 95 if red < 95 else 1 + (red - 95) / 40 + six_green = green / 95 if green < 95 else 1 + (green - 95) / 40 + six_blue = blue / 95 if blue < 95 else 1 + (blue - 95) / 40 + + color_number = ( + 16 + 36 * round(six_red) + 6 * 
round(six_green) + round(six_blue) + ) + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + # Convert to standard from truecolor or 8-bit + elif system == ColorSystem.STANDARD: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = STANDARD_PALETTE.match(triplet) + return Color(self.name, ColorType.STANDARD, number=color_number) + + elif system == ColorSystem.WINDOWS: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + if self.number < 16: + return Color(self.name, ColorType.WINDOWS, number=self.number) + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = WINDOWS_PALETTE.match(triplet) + return Color(self.name, ColorType.WINDOWS, number=color_number) + + return self + + +def parse_rgb_hex(hex_color: str) -> ColorTriplet: + """Parse six hex characters in to RGB triplet.""" + assert len(hex_color) == 6, "must be 6 characters" + color = ColorTriplet( + int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16) + ) + return color + + +def blend_rgb( + color1: ColorTriplet, color2: ColorTriplet, cross_fade: float = 0.5 +) -> ColorTriplet: + """Blend one RGB color in to another.""" + r1, g1, b1 = color1 + r2, g2, b2 = color2 + new_color = ColorTriplet( + int(r1 + (r2 - r1) * cross_fade), + int(g1 + (g2 - g1) * cross_fade), + int(b1 + (b2 - b1) * cross_fade), + ) + return new_color + + +if __name__ == "__main__": # pragma: no cover + + from .console import Console + from .table import Table + from .text import Text + + console = Console() + + table = Table(show_footer=False, show_edge=True) + table.add_column("Color", width=10, overflow="ellipsis") + table.add_column("Number", justify="right", style="yellow") + table.add_column("Name", style="green") + table.add_column("Hex", style="blue") + table.add_column("RGB", style="magenta") + + colors = sorted((v, k) for k, v in ANSI_COLOR_NAMES.items()) + for color_number, name in colors: + if "grey" in name: + continue + color_cell = Text(" " * 10, style=f"on {name}") + if color_number < 16: + table.add_row(color_cell, f"{color_number}", Text(f'"{name}"')) + else: + color = EIGHT_BIT_PALETTE[color_number] # type: ignore[has-type] + table.add_row( + color_cell, str(color_number), Text(f'"{name}"'), color.hex, color.rgb + ) + + console.print(table) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py new file mode 100644 index 0000000..02cab32 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py @@ -0,0 +1,38 @@ +from typing import NamedTuple, Tuple + + +class ColorTriplet(NamedTuple): + """The red, green, and blue components of a color.""" + + red: int + """Red component in 0 to 255 range.""" + green: int + """Green component in 0 to 255 range.""" + blue: int + """Blue component in 0 to 255 range.""" + + @property + def hex(self) -> str: + """get the color triplet in CSS style.""" + red, green, blue = self + return f"#{red:02x}{green:02x}{blue:02x}" + + @property + def rgb(self) -> str: + """The color in RGB format. + + Returns: + str: An rgb color, e.g. ``"rgb(100,23,255)"``. 
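`blend_rgb` linearly interpolates each channel, so `cross_fade=0.0` gives the first color, `1.0` the second, and `0.5` the component-wise midpoint (truncated to int). A sketch:

```python
from pip._vendor.rich.color import blend_rgb, parse_rgb_hex

start = parse_rgb_hex("000000")
end = parse_rgb_hex("ff8800")

midpoint = blend_rgb(start, end, cross_fade=0.5)
print(midpoint)      # ColorTriplet(red=127, green=68, blue=0)
print(midpoint.hex)  # "#7f4400"
```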
+ """ + red, green, blue = self + return f"rgb({red},{green},{blue})" + + @property + def normalized(self) -> Tuple[float, float, float]: + """Convert components into floats between 0 and 1. + + Returns: + Tuple[float, float, float]: A tuple of three normalized colour components. + """ + red, green, blue = self + return red / 255.0, green / 255.0, blue / 255.0 diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py new file mode 100644 index 0000000..669a3a7 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py @@ -0,0 +1,187 @@ +from collections import defaultdict +from itertools import chain +from operator import itemgetter +from typing import Dict, Iterable, List, Optional, Tuple + +from .align import Align, AlignMethod +from .console import Console, ConsoleOptions, RenderableType, RenderResult +from .constrain import Constrain +from .measure import Measurement +from .padding import Padding, PaddingDimensions +from .table import Table +from .text import TextType +from .jupyter import JupyterMixin + + +class Columns(JupyterMixin): + """Display renderables in neat columns. + + Args: + renderables (Iterable[RenderableType]): Any number of Rich renderables (including str). + width (int, optional): The desired width of the columns, or None to auto detect. Defaults to None. + padding (PaddingDimensions, optional): Optional padding around cells. Defaults to (0, 1). + expand (bool, optional): Expand columns to full width. Defaults to False. + equal (bool, optional): Arrange in to equal sized columns. Defaults to False. + column_first (bool, optional): Align items from top to bottom (rather than left to right). Defaults to False. + right_to_left (bool, optional): Start column from right hand side. Defaults to False. + align (str, optional): Align value ("left", "right", or "center") or None for default. Defaults to None. + title (TextType, optional): Optional title for Columns. + """ + + def __init__( + self, + renderables: Optional[Iterable[RenderableType]] = None, + padding: PaddingDimensions = (0, 1), + *, + width: Optional[int] = None, + expand: bool = False, + equal: bool = False, + column_first: bool = False, + right_to_left: bool = False, + align: Optional[AlignMethod] = None, + title: Optional[TextType] = None, + ) -> None: + self.renderables = list(renderables or []) + self.width = width + self.padding = padding + self.expand = expand + self.equal = equal + self.column_first = column_first + self.right_to_left = right_to_left + self.align: Optional[AlignMethod] = align + self.title = title + + def add_renderable(self, renderable: RenderableType) -> None: + """Add a renderable to the columns. + + Args: + renderable (RenderableType): Any renderable object. 
+ """ + self.renderables.append(renderable) + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + render_str = console.render_str + renderables = [ + render_str(renderable) if isinstance(renderable, str) else renderable + for renderable in self.renderables + ] + if not renderables: + return + _top, right, _bottom, left = Padding.unpack(self.padding) + width_padding = max(left, right) + max_width = options.max_width + widths: Dict[int, int] = defaultdict(int) + column_count = len(renderables) + + get_measurement = Measurement.get + renderable_widths = [ + get_measurement(console, options, renderable).maximum + for renderable in renderables + ] + if self.equal: + renderable_widths = [max(renderable_widths)] * len(renderable_widths) + + def iter_renderables( + column_count: int, + ) -> Iterable[Tuple[int, Optional[RenderableType]]]: + item_count = len(renderables) + if self.column_first: + width_renderables = list(zip(renderable_widths, renderables)) + + column_lengths: List[int] = [item_count // column_count] * column_count + for col_no in range(item_count % column_count): + column_lengths[col_no] += 1 + + row_count = (item_count + column_count - 1) // column_count + cells = [[-1] * column_count for _ in range(row_count)] + row = col = 0 + for index in range(item_count): + cells[row][col] = index + column_lengths[col] -= 1 + if column_lengths[col]: + row += 1 + else: + col += 1 + row = 0 + for index in chain.from_iterable(cells): + if index == -1: + break + yield width_renderables[index] + else: + yield from zip(renderable_widths, renderables) + # Pad odd elements with spaces + if item_count % column_count: + for _ in range(column_count - (item_count % column_count)): + yield 0, None + + table = Table.grid(padding=self.padding, collapse_padding=True, pad_edge=False) + table.expand = self.expand + table.title = self.title + + if self.width is not None: + column_count = (max_width) // (self.width + width_padding) + for _ in range(column_count): + table.add_column(width=self.width) + else: + while column_count > 1: + widths.clear() + column_no = 0 + for renderable_width, _ in iter_renderables(column_count): + widths[column_no] = max(widths[column_no], renderable_width) + total_width = sum(widths.values()) + width_padding * ( + len(widths) - 1 + ) + if total_width > max_width: + column_count = len(widths) - 1 + break + else: + column_no = (column_no + 1) % column_count + else: + break + + get_renderable = itemgetter(1) + _renderables = [ + get_renderable(_renderable) + for _renderable in iter_renderables(column_count) + ] + if self.equal: + _renderables = [ + None + if renderable is None + else Constrain(renderable, renderable_widths[0]) + for renderable in _renderables + ] + if self.align: + align = self.align + _Align = Align + _renderables = [ + None if renderable is None else _Align(renderable, align) + for renderable in _renderables + ] + + right_to_left = self.right_to_left + add_row = table.add_row + for start in range(0, len(_renderables), column_count): + row = _renderables[start : start + column_count] + if right_to_left: + row = row[::-1] + add_row(*row) + yield table + + +if __name__ == "__main__": # pragma: no cover + import os + + console = Console() + + files = [f"{i} {s}" for i, s in enumerate(sorted(os.listdir()))] + columns = Columns(files, padding=(0, 1), expand=False, equal=False) + console.print(columns) + console.rule() + columns.column_first = True + console.print(columns) + columns.right_to_left = True + console.rule() + 
console.print(columns) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/console.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/console.py new file mode 100644 index 0000000..e559cbb --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/console.py @@ -0,0 +1,2633 @@ +import inspect +import os +import platform +import sys +import threading +import zlib +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import datetime +from functools import wraps +from getpass import getpass +from html import escape +from inspect import isclass +from itertools import islice +from math import ceil +from time import monotonic +from types import FrameType, ModuleType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Mapping, + NamedTuple, + Optional, + TextIO, + Tuple, + Type, + Union, + cast, +) + +from pip._vendor.rich._null_file import NULL_FILE + +if sys.version_info >= (3, 8): + from typing import Literal, Protocol, runtime_checkable +else: + from pip._vendor.typing_extensions import ( + Literal, + Protocol, + runtime_checkable, + ) # pragma: no cover + +from . import errors, themes +from ._emoji_replace import _emoji_replace +from ._export_format import CONSOLE_HTML_FORMAT, CONSOLE_SVG_FORMAT +from ._fileno import get_fileno +from ._log_render import FormatTimeCallable, LogRender +from .align import Align, AlignMethod +from .color import ColorSystem, blend_rgb +from .control import Control +from .emoji import EmojiVariant +from .highlighter import NullHighlighter, ReprHighlighter +from .markup import render as render_markup +from .measure import Measurement, measure_renderables +from .pager import Pager, SystemPager +from .pretty import Pretty, is_expandable +from .protocol import rich_cast +from .region import Region +from .scope import render_scope +from .screen import Screen +from .segment import Segment +from .style import Style, StyleType +from .styled import Styled +from .terminal_theme import DEFAULT_TERMINAL_THEME, SVG_EXPORT_THEME, TerminalTheme +from .text import Text, TextType +from .theme import Theme, ThemeStack + +if TYPE_CHECKING: + from ._windows import WindowsConsoleFeatures + from .live import Live + from .status import Status + +JUPYTER_DEFAULT_COLUMNS = 115 +JUPYTER_DEFAULT_LINES = 100 +WINDOWS = platform.system() == "Windows" + +HighlighterType = Callable[[Union[str, "Text"]], "Text"] +JustifyMethod = Literal["default", "left", "center", "right", "full"] +OverflowMethod = Literal["fold", "crop", "ellipsis", "ignore"] + + +class NoChange: + pass + + +NO_CHANGE = NoChange() + +try: + _STDIN_FILENO = sys.__stdin__.fileno() +except Exception: + _STDIN_FILENO = 0 +try: + _STDOUT_FILENO = sys.__stdout__.fileno() +except Exception: + _STDOUT_FILENO = 1 +try: + _STDERR_FILENO = sys.__stderr__.fileno() +except Exception: + _STDERR_FILENO = 2 + +_STD_STREAMS = (_STDIN_FILENO, _STDOUT_FILENO, _STDERR_FILENO) +_STD_STREAMS_OUTPUT = (_STDOUT_FILENO, _STDERR_FILENO) + + +_TERM_COLORS = { + "kitty": ColorSystem.EIGHT_BIT, + "256color": ColorSystem.EIGHT_BIT, + "16color": ColorSystem.STANDARD, +} + + +class ConsoleDimensions(NamedTuple): + """Size of the terminal.""" + + width: int + """The width of the console in 'cells'.""" + height: int + """The height of the console in lines.""" + + +@dataclass +class ConsoleOptions: + """Options for __rich_console__ method.""" + + size: ConsoleDimensions + """Size of console.""" + legacy_windows: bool + 
"""legacy_windows: flag for legacy windows.""" + min_width: int + """Minimum width of renderable.""" + max_width: int + """Maximum width of renderable.""" + is_terminal: bool + """True if the target is a terminal, otherwise False.""" + encoding: str + """Encoding of terminal.""" + max_height: int + """Height of container (starts as terminal)""" + justify: Optional[JustifyMethod] = None + """Justify value override for renderable.""" + overflow: Optional[OverflowMethod] = None + """Overflow value override for renderable.""" + no_wrap: Optional[bool] = False + """Disable wrapping for text.""" + highlight: Optional[bool] = None + """Highlight override for render_str.""" + markup: Optional[bool] = None + """Enable markup when rendering strings.""" + height: Optional[int] = None + + @property + def ascii_only(self) -> bool: + """Check if renderables should use ascii only.""" + return not self.encoding.startswith("utf") + + def copy(self) -> "ConsoleOptions": + """Return a copy of the options. + + Returns: + ConsoleOptions: a copy of self. + """ + options: ConsoleOptions = ConsoleOptions.__new__(ConsoleOptions) + options.__dict__ = self.__dict__.copy() + return options + + def update( + self, + *, + width: Union[int, NoChange] = NO_CHANGE, + min_width: Union[int, NoChange] = NO_CHANGE, + max_width: Union[int, NoChange] = NO_CHANGE, + justify: Union[Optional[JustifyMethod], NoChange] = NO_CHANGE, + overflow: Union[Optional[OverflowMethod], NoChange] = NO_CHANGE, + no_wrap: Union[Optional[bool], NoChange] = NO_CHANGE, + highlight: Union[Optional[bool], NoChange] = NO_CHANGE, + markup: Union[Optional[bool], NoChange] = NO_CHANGE, + height: Union[Optional[int], NoChange] = NO_CHANGE, + ) -> "ConsoleOptions": + """Update values, return a copy.""" + options = self.copy() + if not isinstance(width, NoChange): + options.min_width = options.max_width = max(0, width) + if not isinstance(min_width, NoChange): + options.min_width = min_width + if not isinstance(max_width, NoChange): + options.max_width = max_width + if not isinstance(justify, NoChange): + options.justify = justify + if not isinstance(overflow, NoChange): + options.overflow = overflow + if not isinstance(no_wrap, NoChange): + options.no_wrap = no_wrap + if not isinstance(highlight, NoChange): + options.highlight = highlight + if not isinstance(markup, NoChange): + options.markup = markup + if not isinstance(height, NoChange): + if height is not None: + options.max_height = height + options.height = None if height is None else max(0, height) + return options + + def update_width(self, width: int) -> "ConsoleOptions": + """Update just the width, return a copy. + + Args: + width (int): New width (sets both min_width and max_width) + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + return options + + def update_height(self, height: int) -> "ConsoleOptions": + """Update the height, and return a copy. + + Args: + height (int): New height + + Returns: + ~ConsoleOptions: New Console options instance. + """ + options = self.copy() + options.max_height = options.height = height + return options + + def reset_height(self) -> "ConsoleOptions": + """Return a copy of the options with height set to ``None``. + + Returns: + ~ConsoleOptions: New console options instance. 
+ """ + options = self.copy() + options.height = None + return options + + def update_dimensions(self, width: int, height: int) -> "ConsoleOptions": + """Update the width and height, and return a copy. + + Args: + width (int): New width (sets both min_width and max_width). + height (int): New height. + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + options.height = options.max_height = height + return options + + +@runtime_checkable +class RichCast(Protocol): + """An object that may be 'cast' to a console renderable.""" + + def __rich__( + self, + ) -> Union["ConsoleRenderable", "RichCast", str]: # pragma: no cover + ... + + +@runtime_checkable +class ConsoleRenderable(Protocol): + """An object that supports the console protocol.""" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": # pragma: no cover + ... + + +# A type that may be rendered by Console. +RenderableType = Union[ConsoleRenderable, RichCast, str] + +# The result of calling a __rich_console__ method. +RenderResult = Iterable[Union[RenderableType, Segment]] + +_null_highlighter = NullHighlighter() + + +class CaptureError(Exception): + """An error in the Capture context manager.""" + + +class NewLine: + """A renderable to generate new line(s)""" + + def __init__(self, count: int = 1) -> None: + self.count = count + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> Iterable[Segment]: + yield Segment("\n" * self.count) + + +class ScreenUpdate: + """Render a list of lines at a given offset.""" + + def __init__(self, lines: List[List[Segment]], x: int, y: int) -> None: + self._lines = lines + self.x = x + self.y = y + + def __rich_console__( + self, console: "Console", options: ConsoleOptions + ) -> RenderResult: + x = self.x + move_to = Control.move_to + for offset, line in enumerate(self._lines, self.y): + yield move_to(x, offset) + yield from line + + +class Capture: + """Context manager to capture the result of printing to the console. + See :meth:`~rich.console.Console.capture` for how to use. + + Args: + console (Console): A console instance to capture output. + """ + + def __init__(self, console: "Console") -> None: + self._console = console + self._result: Optional[str] = None + + def __enter__(self) -> "Capture": + self._console.begin_capture() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self._result = self._console.end_capture() + + def get(self) -> str: + """Get the result of the capture.""" + if self._result is None: + raise CaptureError( + "Capture result is not available until context manager exits." + ) + return self._result + + +class ThemeContext: + """A context manager to use a temporary theme. See :meth:`~rich.console.Console.use_theme` for usage.""" + + def __init__(self, console: "Console", theme: Theme, inherit: bool = True) -> None: + self.console = console + self.theme = theme + self.inherit = inherit + + def __enter__(self) -> "ThemeContext": + self.console.push_theme(self.theme) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.console.pop_theme() + + +class PagerContext: + """A context manager that 'pages' content. 
See :meth:`~rich.console.Console.pager` for usage.""" + + def __init__( + self, + console: "Console", + pager: Optional[Pager] = None, + styles: bool = False, + links: bool = False, + ) -> None: + self._console = console + self.pager = SystemPager() if pager is None else pager + self.styles = styles + self.links = links + + def __enter__(self) -> "PagerContext": + self._console._enter_buffer() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if exc_type is None: + with self._console._lock: + buffer: List[Segment] = self._console._buffer[:] + del self._console._buffer[:] + segments: Iterable[Segment] = buffer + if not self.styles: + segments = Segment.strip_styles(segments) + elif not self.links: + segments = Segment.strip_links(segments) + content = self._console._render_buffer(segments) + self.pager.show(content) + self._console._exit_buffer() + + +class ScreenContext: + """A context manager that enables an alternative screen. See :meth:`~rich.console.Console.screen` for usage.""" + + def __init__( + self, console: "Console", hide_cursor: bool, style: StyleType = "" + ) -> None: + self.console = console + self.hide_cursor = hide_cursor + self.screen = Screen(style=style) + self._changed = False + + def update( + self, *renderables: RenderableType, style: Optional[StyleType] = None + ) -> None: + """Update the screen. + + Args: + renderable (RenderableType, optional): Optional renderable to replace current renderable, + or None for no change. Defaults to None. + style: (Style, optional): Replacement style, or None for no change. Defaults to None. + """ + if renderables: + self.screen.renderable = ( + Group(*renderables) if len(renderables) > 1 else renderables[0] + ) + if style is not None: + self.screen.style = style + self.console.print(self.screen, end="") + + def __enter__(self) -> "ScreenContext": + self._changed = self.console.set_alt_screen(True) + if self._changed and self.hide_cursor: + self.console.show_cursor(False) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self._changed: + self.console.set_alt_screen(False) + if self.hide_cursor: + self.console.show_cursor(True) + + +class Group: + """Takes a group of renderables and returns a renderable object that renders the group. + + Args: + renderables (Iterable[RenderableType]): An iterable of renderable objects. + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def __init__(self, *renderables: "RenderableType", fit: bool = True) -> None: + self._renderables = renderables + self.fit = fit + self._render: Optional[List[RenderableType]] = None + + @property + def renderables(self) -> List["RenderableType"]: + if self._render is None: + self._render = list(self._renderables) + return self._render + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.fit: + return measure_renderables(console, options, self.renderables) + else: + return Measurement(options.max_width, options.max_width) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> RenderResult: + yield from self.renderables + + +def group(fit: bool = True) -> Callable[..., Callable[..., Group]]: + """A decorator that turns an iterable of renderables in to a group. 
+ + Args: + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def decorator( + method: Callable[..., Iterable[RenderableType]] + ) -> Callable[..., Group]: + """Convert a method that returns an iterable of renderables in to a Group.""" + + @wraps(method) + def _replace(*args: Any, **kwargs: Any) -> Group: + renderables = method(*args, **kwargs) + return Group(*renderables, fit=fit) + + return _replace + + return decorator + + +def _is_jupyter() -> bool: # pragma: no cover + """Check if we're running in a Jupyter notebook.""" + try: + get_ipython # type: ignore[name-defined] + except NameError: + return False + ipython = get_ipython() # type: ignore[name-defined] + shell = ipython.__class__.__name__ + if ( + "google.colab" in str(ipython.__class__) + or os.getenv("DATABRICKS_RUNTIME_VERSION") + or shell == "ZMQInteractiveShell" + ): + return True # Jupyter notebook or qtconsole + elif shell == "TerminalInteractiveShell": + return False # Terminal running IPython + else: + return False # Other type (?) + + +COLOR_SYSTEMS = { + "standard": ColorSystem.STANDARD, + "256": ColorSystem.EIGHT_BIT, + "truecolor": ColorSystem.TRUECOLOR, + "windows": ColorSystem.WINDOWS, +} + +_COLOR_SYSTEMS_NAMES = {system: name for name, system in COLOR_SYSTEMS.items()} + + +@dataclass +class ConsoleThreadLocals(threading.local): + """Thread local values for Console context.""" + + theme_stack: ThemeStack + buffer: List[Segment] = field(default_factory=list) + buffer_index: int = 0 + + +class RenderHook(ABC): + """Provides hooks in to the render process.""" + + @abstractmethod + def process_renderables( + self, renderables: List[ConsoleRenderable] + ) -> List[ConsoleRenderable]: + """Called with a list of objects to render. + + This method can return a new list of renderables, or modify and return the same list. + + Args: + renderables (List[ConsoleRenderable]): A number of renderable objects. + + Returns: + List[ConsoleRenderable]: A replacement list of renderables. + """ + + +_windows_console_features: Optional["WindowsConsoleFeatures"] = None + + +def get_windows_console_features() -> "WindowsConsoleFeatures": # pragma: no cover + global _windows_console_features + if _windows_console_features is not None: + return _windows_console_features + from ._windows import get_windows_console_features + + _windows_console_features = get_windows_console_features() + return _windows_console_features + + +def detect_legacy_windows() -> bool: + """Detect legacy Windows.""" + return WINDOWS and not get_windows_console_features().vt + + +class Console: + """A high level console interface. + + Args: + color_system (str, optional): The color system supported by your terminal, + either ``"standard"``, ``"256"`` or ``"truecolor"``. Leave as ``"auto"`` to autodetect. + force_terminal (Optional[bool], optional): Enable/disable terminal control codes, or None to auto-detect terminal. Defaults to None. + force_jupyter (Optional[bool], optional): Enable/disable Jupyter rendering, or None to auto-detect Jupyter. Defaults to None. + force_interactive (Optional[bool], optional): Enable/disable interactive mode, or None to auto detect. Defaults to None. + soft_wrap (Optional[bool], optional): Set soft wrap default on print method. Defaults to False. + theme (Theme, optional): An optional style theme object, or ``None`` for default theme. + stderr (bool, optional): Use stderr rather than stdout if ``file`` is not specified. Defaults to False. 
+ file (IO, optional): A file object where the console should write to. Defaults to stdout. + quiet (bool, optional): Boolean to suppress all output. Defaults to False. + width (int, optional): The width of the terminal. Leave as default to auto-detect width. + height (int, optional): The height of the terminal. Leave as default to auto-detect height. + style (StyleType, optional): Style to apply to all output, or None for no style. Defaults to None. + no_color (Optional[bool], optional): Enable no color mode, or None to auto detect. Defaults to None. + tab_size (int, optional): Number of spaces used to replace a tab character. Defaults to 8. + record (bool, optional): Boolean to enable recording of terminal output, + required to call :meth:`export_html`, :meth:`export_svg`, and :meth:`export_text`. Defaults to False. + markup (bool, optional): Boolean to enable :ref:`console_markup`. Defaults to True. + emoji (bool, optional): Enable emoji code. Defaults to True. + emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None. + highlight (bool, optional): Enable automatic highlighting. Defaults to True. + log_time (bool, optional): Boolean to enable logging of time by :meth:`log` methods. Defaults to True. + log_path (bool, optional): Boolean to enable the logging of the caller by :meth:`log`. Defaults to True. + log_time_format (Union[str, FormatTimeCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%X]". + highlighter (HighlighterType, optional): Default highlighter. + legacy_windows (bool, optional): Enable legacy Windows mode, or ``None`` to auto detect. Defaults to ``None``. + safe_box (bool, optional): Restrict box options that don't render on legacy Windows. + get_datetime (Callable[[], datetime], optional): Callable that gets the current time as a datetime.datetime object (used by Console.log), + or None for datetime.now. + get_time (Callable[[], float], optional): Callable that gets the current time in seconds, default uses time.monotonic. 
+ """ + + _environ: Mapping[str, str] = os.environ + + def __init__( + self, + *, + color_system: Optional[ + Literal["auto", "standard", "256", "truecolor", "windows"] + ] = "auto", + force_terminal: Optional[bool] = None, + force_jupyter: Optional[bool] = None, + force_interactive: Optional[bool] = None, + soft_wrap: bool = False, + theme: Optional[Theme] = None, + stderr: bool = False, + file: Optional[IO[str]] = None, + quiet: bool = False, + width: Optional[int] = None, + height: Optional[int] = None, + style: Optional[StyleType] = None, + no_color: Optional[bool] = None, + tab_size: int = 8, + record: bool = False, + markup: bool = True, + emoji: bool = True, + emoji_variant: Optional[EmojiVariant] = None, + highlight: bool = True, + log_time: bool = True, + log_path: bool = True, + log_time_format: Union[str, FormatTimeCallable] = "[%X]", + highlighter: Optional["HighlighterType"] = ReprHighlighter(), + legacy_windows: Optional[bool] = None, + safe_box: bool = True, + get_datetime: Optional[Callable[[], datetime]] = None, + get_time: Optional[Callable[[], float]] = None, + _environ: Optional[Mapping[str, str]] = None, + ): + # Copy of os.environ allows us to replace it for testing + if _environ is not None: + self._environ = _environ + + self.is_jupyter = _is_jupyter() if force_jupyter is None else force_jupyter + if self.is_jupyter: + if width is None: + jupyter_columns = self._environ.get("JUPYTER_COLUMNS") + if jupyter_columns is not None and jupyter_columns.isdigit(): + width = int(jupyter_columns) + else: + width = JUPYTER_DEFAULT_COLUMNS + if height is None: + jupyter_lines = self._environ.get("JUPYTER_LINES") + if jupyter_lines is not None and jupyter_lines.isdigit(): + height = int(jupyter_lines) + else: + height = JUPYTER_DEFAULT_LINES + + self.tab_size = tab_size + self.record = record + self._markup = markup + self._emoji = emoji + self._emoji_variant: Optional[EmojiVariant] = emoji_variant + self._highlight = highlight + self.legacy_windows: bool = ( + (detect_legacy_windows() and not self.is_jupyter) + if legacy_windows is None + else legacy_windows + ) + + if width is None: + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) - self.legacy_windows + if height is None: + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + self.soft_wrap = soft_wrap + self._width = width + self._height = height + + self._color_system: Optional[ColorSystem] + + self._force_terminal = None + if force_terminal is not None: + self._force_terminal = force_terminal + + self._file = file + self.quiet = quiet + self.stderr = stderr + + if color_system is None: + self._color_system = None + elif color_system == "auto": + self._color_system = self._detect_color_system() + else: + self._color_system = COLOR_SYSTEMS[color_system] + + self._lock = threading.RLock() + self._log_render = LogRender( + show_time=log_time, + show_path=log_path, + time_format=log_time_format, + ) + self.highlighter: HighlighterType = highlighter or _null_highlighter + self.safe_box = safe_box + self.get_datetime = get_datetime or datetime.now + self.get_time = get_time or monotonic + self.style = style + self.no_color = ( + no_color if no_color is not None else "NO_COLOR" in self._environ + ) + self.is_interactive = ( + (self.is_terminal and not self.is_dumb_terminal) + if force_interactive is None + else force_interactive + ) + + self._record_buffer_lock = threading.RLock() + self._thread_locals = 
ConsoleThreadLocals( + theme_stack=ThemeStack(themes.DEFAULT if theme is None else theme) + ) + self._record_buffer: List[Segment] = [] + self._render_hooks: List[RenderHook] = [] + self._live: Optional["Live"] = None + self._is_alt_screen = False + + def __repr__(self) -> str: + return f"<console width={self.width} {self.file!r}>" + + @property + def file(self) -> IO[str]: + """Get the file object to write to.""" + file = self._file or (sys.stderr if self.stderr else sys.stdout) + file = getattr(file, "rich_proxied_file", file) + if file is None: + file = NULL_FILE + return file + + @file.setter + def file(self, new_file: IO[str]) -> None: + """Set a new file object.""" + self._file = new_file + + @property + def _buffer(self) -> List[Segment]: + """Get a thread local buffer.""" + return self._thread_locals.buffer + + @property + def _buffer_index(self) -> int: + """Get a thread local buffer.""" + return self._thread_locals.buffer_index + + @_buffer_index.setter + def _buffer_index(self, value: int) -> None: + self._thread_locals.buffer_index = value + + @property + def _theme_stack(self) -> ThemeStack: + """Get the thread local theme stack.""" + return self._thread_locals.theme_stack + + def _detect_color_system(self) -> Optional[ColorSystem]: + """Detect color system from env vars.""" + if self.is_jupyter: + return ColorSystem.TRUECOLOR + if not self.is_terminal or self.is_dumb_terminal: + return None + if WINDOWS: # pragma: no cover + if self.legacy_windows: # pragma: no cover + return ColorSystem.WINDOWS + windows_console_features = get_windows_console_features() + return ( + ColorSystem.TRUECOLOR + if windows_console_features.truecolor + else ColorSystem.EIGHT_BIT + ) + else: + color_term = self._environ.get("COLORTERM", "").strip().lower() + if color_term in ("truecolor", "24bit"): + return ColorSystem.TRUECOLOR + term = self._environ.get("TERM", "").strip().lower() + _term_name, _hyphen, colors = term.rpartition("-") + color_system = _TERM_COLORS.get(colors, ColorSystem.STANDARD) + return color_system + + def _enter_buffer(self) -> None: + """Enter in to a buffer context, and buffer all output.""" + self._buffer_index += 1 + + def _exit_buffer(self) -> None: + """Leave buffer context, and render content if required.""" + self._buffer_index -= 1 + self._check_buffer() + + def set_live(self, live: "Live") -> None: + """Set Live instance. Used by Live context manager. + + Args: + live (Live): Live instance using this Console. + + Raises: + errors.LiveError: If this Console has a Live context currently active. + """ + with self._lock: + if self._live is not None: + raise errors.LiveError("Only one live display may be active at once") + self._live = live + + def clear_live(self) -> None: + """Clear the Live instance.""" + with self._lock: + self._live = None + + def push_render_hook(self, hook: RenderHook) -> None: + """Add a new render hook to the stack. + + Args: + hook (RenderHook): Render hook instance. + """ + with self._lock: + self._render_hooks.append(hook) + + def pop_render_hook(self) -> None: + """Pop the last render hook from the stack.""" + with self._lock: + self._render_hooks.pop() + + def __enter__(self) -> "Console": + """Own context manager to enter buffer context.""" + self._enter_buffer() + return self + + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: + """Exit buffer context.""" + self._exit_buffer() + + def begin_capture(self) -> None: + """Begin capturing console output. 
Call :meth:`end_capture` to exit capture mode and return output.""" + self._enter_buffer() + + def end_capture(self) -> str: + """End capture mode and return captured string. + + Returns: + str: Console output. + """ + render_result = self._render_buffer(self._buffer) + del self._buffer[:] + self._exit_buffer() + return render_result + + def push_theme(self, theme: Theme, *, inherit: bool = True) -> None: + """Push a new theme on to the top of the stack, replacing the styles from the previous theme. + Generally speaking, you should call :meth:`~rich.console.Console.use_theme` to get a context manager, rather + than calling this method directly. + + Args: + theme (Theme): A theme instance. + inherit (bool, optional): Inherit existing styles. Defaults to True. + """ + self._theme_stack.push_theme(theme, inherit=inherit) + + def pop_theme(self) -> None: + """Remove theme from top of stack, restoring previous theme.""" + self._theme_stack.pop_theme() + + def use_theme(self, theme: Theme, *, inherit: bool = True) -> ThemeContext: + """Use a different theme for the duration of the context manager. + + Args: + theme (Theme): Theme instance to use. + inherit (bool, optional): Inherit existing console styles. Defaults to True. + + Returns: + ThemeContext: A context manager that applies the theme on enter and restores the previous theme on exit. + """ + return ThemeContext(self, theme, inherit) + + @property + def color_system(self) -> Optional[str]: + """Get color system string. + + Returns: + Optional[str]: "standard", "256", "truecolor", or "windows". + """ + + if self._color_system is not None: + return _COLOR_SYSTEMS_NAMES[self._color_system] + else: + return None + + @property + def encoding(self) -> str: + """Get the encoding of the console file, e.g. ``"utf-8"``. + + Returns: + str: A standard encoding string. + """ + return (getattr(self.file, "encoding", "utf-8") or "utf-8").lower() + + @property + def is_terminal(self) -> bool: + """Check if the console is writing to a terminal. + + Returns: + bool: True if the console is writing to a device capable of + understanding terminal codes, otherwise False. + """ + if self._force_terminal is not None: + return self._force_terminal + + if hasattr(sys.stdin, "__module__") and sys.stdin.__module__.startswith( + "idlelib" + ): + # Return False for Idle which claims to be a tty but can't handle ansi codes + return False + + if self.is_jupyter: + # return False for Jupyter, which may have FORCE_COLOR set + return False + + # If FORCE_COLOR env var has any value at all, we assume a terminal. + force_color = self._environ.get("FORCE_COLOR") + if force_color is not None: + self._force_terminal = True + return True + + isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) + try: + return False if isatty is None else isatty() + except ValueError: + # in some situations (at the end of a pytest run, for example) isatty() can raise + # ValueError: I/O operation on closed file + # return False because we aren't in a terminal anymore + return False + + @property + def is_dumb_terminal(self) -> bool: + """Detect dumb terminal. + + Returns: + bool: True if writing to a dumb terminal, otherwise False. 
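+ + Example (illustrative; assumes ``TERM=dumb`` and output going to a real terminal): + >>> console = Console() + >>> console.is_dumb_terminal + True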
+ + """ + _term = self._environ.get("TERM", "") + is_dumb = _term.lower() in ("dumb", "unknown") + return self.is_terminal and is_dumb + + @property + def options(self) -> ConsoleOptions: + """Get default console options.""" + return ConsoleOptions( + max_height=self.size.height, + size=self.size, + legacy_windows=self.legacy_windows, + min_width=1, + max_width=self.width, + encoding=self.encoding, + is_terminal=self.is_terminal, + ) + + @property + def size(self) -> ConsoleDimensions: + """Get the size of the console. + + Returns: + ConsoleDimensions: A named tuple containing the dimensions. + """ + + if self._width is not None and self._height is not None: + return ConsoleDimensions(self._width - self.legacy_windows, self._height) + + if self.is_dumb_terminal: + return ConsoleDimensions(80, 25) + + width: Optional[int] = None + height: Optional[int] = None + + if WINDOWS: # pragma: no cover + try: + width, height = os.get_terminal_size() + except (AttributeError, ValueError, OSError): # Probably not a terminal + pass + else: + for file_descriptor in _STD_STREAMS: + try: + width, height = os.get_terminal_size(file_descriptor) + except (AttributeError, ValueError, OSError): + pass + else: + break + + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + # get_terminal_size can report 0, 0 if run from pseudo-terminal + width = width or 80 + height = height or 25 + return ConsoleDimensions( + width - self.legacy_windows if self._width is None else self._width, + height if self._height is None else self._height, + ) + + @size.setter + def size(self, new_size: Tuple[int, int]) -> None: + """Set a new size for the terminal. + + Args: + new_size (Tuple[int, int]): New width and height. + """ + width, height = new_size + self._width = width + self._height = height + + @property + def width(self) -> int: + """Get the width of the console. + + Returns: + int: The width (in characters) of the console. + """ + return self.size.width + + @width.setter + def width(self, width: int) -> None: + """Set width. + + Args: + width (int): New width. + """ + self._width = width + + @property + def height(self) -> int: + """Get the height of the console. + + Returns: + int: The height (in lines) of the console. + """ + return self.size.height + + @height.setter + def height(self, height: int) -> None: + """Set height. + + Args: + height (int): new height. + """ + self._height = height + + def bell(self) -> None: + """Play a 'bell' sound (if supported by the terminal).""" + self.control(Control.bell()) + + def capture(self) -> Capture: + """A context manager to *capture* the result of print() or log() in a string, + rather than writing it to the console. + + Example: + >>> from rich.console import Console + >>> console = Console() + >>> with console.capture() as capture: + ... console.print("[bold magenta]Hello World[/]") + >>> print(capture.get()) + + Returns: + Capture: Context manager with disables writing to the terminal. + """ + capture = Capture(self) + return capture + + def pager( + self, pager: Optional[Pager] = None, styles: bool = False, links: bool = False + ) -> PagerContext: + """A context manager to display anything printed within a "pager". The pager application + is defined by the system and will typically support at least pressing a key to scroll. 
+ + Args: + pager (Pager, optional): A pager object, or None to use :class:`~rich.pager.SystemPager`. Defaults to None. + styles (bool, optional): Show styles in pager. Defaults to False. + links (bool, optional): Show links in pager. Defaults to False. + + Example: + >>> from rich.console import Console + >>> from rich.__main__ import make_test_card + >>> console = Console() + >>> with console.pager(): + ... console.print(make_test_card()) + + Returns: + PagerContext: A context manager. + """ + return PagerContext(self, pager=pager, styles=styles, links=links) + + def line(self, count: int = 1) -> None: + """Write new line(s). + + Args: + count (int, optional): Number of new lines. Defaults to 1. + """ + + assert count >= 0, "count must be >= 0" + self.print(NewLine(count)) + + def clear(self, home: bool = True) -> None: + """Clear the screen. + + Args: + home (bool, optional): Also move the cursor to 'home' position. Defaults to True. + """ + if home: + self.control(Control.clear(), Control.home()) + else: + self.control(Control.clear()) + + def status( + self, + status: RenderableType, + *, + spinner: str = "dots", + spinner_style: StyleType = "status.spinner", + speed: float = 1.0, + refresh_per_second: float = 12.5, + ) -> "Status": + """Display a status and spinner. + + Args: + status (RenderableType): A status renderable (str or Text typically). + spinner (str, optional): Name of spinner animation (see python -m rich.spinner). Defaults to "dots". + spinner_style (StyleType, optional): Style of spinner. Defaults to "status.spinner". + speed (float, optional): Speed factor for spinner animation. Defaults to 1.0. + refresh_per_second (float, optional): Number of refreshes per second. Defaults to 12.5. + + Returns: + Status: A Status object that may be used as a context manager. + """ + from .status import Status + + status_renderable = Status( + status, + console=self, + spinner=spinner, + spinner_style=spinner_style, + speed=speed, + refresh_per_second=refresh_per_second, + ) + return status_renderable + + def show_cursor(self, show: bool = True) -> bool: + """Show or hide the cursor. + + Args: + show (bool, optional): Set visibility of the cursor. Defaults to True. + + Returns: + bool: True if the control code was written, otherwise False. + """ + if self.is_terminal: + self.control(Control.show_cursor(show)) + return True + return False + + def set_alt_screen(self, enable: bool = True) -> bool: + """Enables alternative screen mode. + + Note, if you enable this mode, you should ensure that it is disabled before + the application exits. See :meth:`~rich.Console.screen` for a context manager + that handles this for you. + + Args: + enable (bool, optional): Enable (True) or disable (False) alternate screen. Defaults to True. + + Returns: + bool: True if the control codes were written. + + """ + changed = False + if self.is_terminal and not self.legacy_windows: + self.control(Control.alt_screen(enable)) + changed = True + self._is_alt_screen = enable + return changed + + @property + def is_alt_screen(self) -> bool: + """Check if the alt screen was enabled. + + Returns: + bool: True if the alt screen was enabled, otherwise False. + """ + return self._is_alt_screen + + def set_window_title(self, title: str) -> bool: + """Set the title of the console terminal window. + + Warning: There is no means within Rich of "resetting" the window title to its + previous value, meaning the title you set will persist even after your application + exits. + + ``fish`` shell resets the window title before and after each command by default, + negating this issue. 
Windows Terminal and command prompt will also reset the title for you. + Most other shells and terminals, however, do not do this. + + Some terminals may require configuration changes before you can set the title. + Some terminals may not support setting the title at all. + + Other software (including the terminal itself, the shell, custom prompts, plugins, etc.) + may also set the terminal window title. This could result in whatever value you write + using this method being overwritten. + + Args: + title (str): The new title of the terminal window. + + Returns: + bool: True if the control code to change the terminal title was + written, otherwise False. Note that a return value of True + does not guarantee that the window title has actually changed, + since the feature may be unsupported/disabled in some terminals. + """ + if self.is_terminal: + self.control(Control.title(title)) + return True + return False + + def screen( + self, hide_cursor: bool = True, style: Optional[StyleType] = None + ) -> "ScreenContext": + """Context manager to enable and disable 'alternative screen' mode. + + Args: + hide_cursor (bool, optional): Also hide the cursor. Defaults to True. + style (Style, optional): Optional style for screen. Defaults to None. + + Returns: + ~ScreenContext: Context which enables alternate screen on enter, and disables it on exit. + """ + return ScreenContext(self, hide_cursor=hide_cursor, style=style or "") + + def measure( + self, renderable: RenderableType, *, options: Optional[ConsoleOptions] = None + ) -> Measurement: + """Measure a renderable. Returns a :class:`~rich.measure.Measurement` object which contains + information regarding the number of characters required to print the renderable. + + Args: + renderable (RenderableType): Any renderable or string. + options (Optional[ConsoleOptions], optional): Options to use when measuring, or None + to use default options. Defaults to None. + + Returns: + Measurement: A measurement of the renderable. + """ + measurement = Measurement.get(self, options or self.options, renderable) + return measurement + + def render( + self, renderable: RenderableType, options: Optional[ConsoleOptions] = None + ) -> Iterable[Segment]: + """Render an object in to an iterable of `Segment` instances. + + This method contains the logic for rendering objects with the console protocol. + You are unlikely to need to use it directly, unless you are extending the library. + + Args: + renderable (RenderableType): An object supporting the console protocol, or + an object that may be converted to a string. + options (ConsoleOptions, optional): An options object, or None to use self.options. Defaults to None. + + Returns: + Iterable[Segment]: An iterable of segments that may be rendered. + """ + + _options = options or self.options + if _options.max_width < 1: + # No space to render anything. This prevents potential recursion errors. 
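+ # A max_width below 1 cannot fit any characters, so there is + # nothing to yield.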
+ return + render_iterable: RenderResult + + renderable = rich_cast(renderable) + if hasattr(renderable, "__rich_console__") and not isclass(renderable): + render_iterable = renderable.__rich_console__(self, _options) # type: ignore[union-attr] + elif isinstance(renderable, str): + text_renderable = self.render_str( + renderable, highlight=_options.highlight, markup=_options.markup + ) + render_iterable = text_renderable.__rich_console__(self, _options) + else: + raise errors.NotRenderableError( + f"Unable to render {renderable!r}; " + "A str, Segment or object with __rich_console__ method is required" + ) + + try: + iter_render = iter(render_iterable) + except TypeError: + raise errors.NotRenderableError( + f"object {render_iterable!r} is not renderable" + ) + _Segment = Segment + _options = _options.reset_height() + for render_output in iter_render: + if isinstance(render_output, _Segment): + yield render_output + else: + yield from self.render(render_output, _options) + + def render_lines( + self, + renderable: RenderableType, + options: Optional[ConsoleOptions] = None, + *, + style: Optional[Style] = None, + pad: bool = True, + new_lines: bool = False, + ) -> List[List[Segment]]: + """Render objects in to a list of lines. + + The output of render_lines is useful when further formatting of rendered console text + is required, such as the Panel class which draws a border around any renderable object. + + Args: + renderable (RenderableType): Any object renderable in the console. + options (Optional[ConsoleOptions], optional): Console options, or None to use self.options. Default to ``None``. + style (Style, optional): Optional style to apply to renderables. Defaults to ``None``. + pad (bool, optional): Pad lines shorter than render width. Defaults to ``True``. + new_lines (bool, optional): Include "\n" characters at end of lines. + + Returns: + List[List[Segment]]: A list of lines, where a line is a list of Segment objects. + """ + with self._lock: + render_options = options or self.options + _rendered = self.render(renderable, render_options) + if style: + _rendered = Segment.apply_style(_rendered, style) + + render_height = render_options.height + if render_height is not None: + render_height = max(0, render_height) + + lines = list( + islice( + Segment.split_and_crop_lines( + _rendered, + render_options.max_width, + include_new_lines=new_lines, + pad=pad, + style=style, + ), + None, + render_height, + ) + ) + if render_options.height is not None: + extra_lines = render_options.height - len(lines) + if extra_lines > 0: + pad_line = [ + [Segment(" " * render_options.max_width, style), Segment("\n")] + if new_lines + else [Segment(" " * render_options.max_width, style)] + ] + lines.extend(pad_line * extra_lines) + + return lines + + def render_str( + self, + text: str, + *, + style: Union[str, Style] = "", + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + highlighter: Optional[HighlighterType] = None, + ) -> "Text": + """Convert a string to a Text instance. This is called automatically if + you print or log a string. + + Args: + text (str): Text to render. + style (Union[str, Style], optional): Style to apply to rendered text. + justify (str, optional): Justify method: "default", "left", "center", "full", or "right". Defaults to ``None``. + overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to ``None``. 
+ emoji (Optional[bool], optional): Enable emoji, or ``None`` to use Console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use Console default. + highlight (Optional[bool], optional): Enable highlighting, or ``None`` to use Console default. + highlighter (HighlighterType, optional): Optional highlighter to apply. + Returns: + Text: Renderable object. + + """ + emoji_enabled = emoji or (emoji is None and self._emoji) + markup_enabled = markup or (markup is None and self._markup) + highlight_enabled = highlight or (highlight is None and self._highlight) + + if markup_enabled: + rich_text = render_markup( + text, + style=style, + emoji=emoji_enabled, + emoji_variant=self._emoji_variant, + ) + rich_text.justify = justify + rich_text.overflow = overflow + else: + rich_text = Text( + _emoji_replace(text, default_variant=self._emoji_variant) + if emoji_enabled + else text, + justify=justify, + overflow=overflow, + style=style, + ) + + _highlighter = (highlighter or self.highlighter) if highlight_enabled else None + if _highlighter is not None: + highlight_text = _highlighter(str(rich_text)) + highlight_text.copy_styles(rich_text) + return highlight_text + + return rich_text + + def get_style( + self, name: Union[str, Style], *, default: Optional[Union[Style, str]] = None + ) -> Style: + """Get a Style instance by its theme name or parse a definition. + + Args: + name (str): The name of a style or a style definition. + default (Optional[Union[Style, str]], optional): Fallback style or definition to use if ``name`` cannot be parsed. Defaults to None. + + Returns: + Style: A Style object. + + Raises: + MissingStyle: If no style could be parsed from name. + + """ + if isinstance(name, Style): + return name + + try: + style = self._theme_stack.get(name) + if style is None: + style = Style.parse(name) + return style.copy() if style.link else style + except errors.StyleSyntaxError as error: + if default is not None: + return self.get_style(default) + raise errors.MissingStyle( + f"Failed to get style {name!r}; {error}" + ) from None + + def _collect_renderables( + self, + objects: Iterable[Any], + sep: str, + end: str, + *, + justify: Optional[JustifyMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + ) -> List[ConsoleRenderable]: + """Combine a number of renderables and text into a list of renderables. + + Args: + objects (Iterable[Any]): Anything that Rich can render. + sep (str): String to write between print data. + end (str): String to write at end of print data. + justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. + + Returns: + List[ConsoleRenderable]: A list of things to render. 
+ """ + renderables: List[ConsoleRenderable] = [] + _append = renderables.append + text: List[Text] = [] + append_text = text.append + + append = _append + if justify in ("left", "center", "right"): + + def align_append(renderable: RenderableType) -> None: + _append(Align(renderable, cast(AlignMethod, justify))) + + append = align_append + + _highlighter: HighlighterType = _null_highlighter + if highlight or (highlight is None and self._highlight): + _highlighter = self.highlighter + + def check_text() -> None: + if text: + sep_text = Text(sep, justify=justify, end=end) + append(sep_text.join(text)) + text.clear() + + for renderable in objects: + renderable = rich_cast(renderable) + if isinstance(renderable, str): + append_text( + self.render_str( + renderable, emoji=emoji, markup=markup, highlighter=_highlighter + ) + ) + elif isinstance(renderable, Text): + append_text(renderable) + elif isinstance(renderable, ConsoleRenderable): + check_text() + append(renderable) + elif is_expandable(renderable): + check_text() + append(Pretty(renderable, highlighter=_highlighter)) + else: + append_text(_highlighter(str(renderable))) + + check_text() + + if self.style is not None: + style = self.get_style(self.style) + renderables = [Styled(renderable, style) for renderable in renderables] + + return renderables + + def rule( + self, + title: TextType = "", + *, + characters: str = "─", + style: Union[str, Style] = "rule.line", + align: AlignMethod = "center", + ) -> None: + """Draw a line with optional centered title. + + Args: + title (str, optional): Text to render over the rule. Defaults to "". + characters (str, optional): Character(s) to form the line. Defaults to "─". + style (str, optional): Style of line. Defaults to "rule.line". + align (str, optional): How to align the title, one of "left", "center", or "right". Defaults to "center". + """ + from .rule import Rule + + rule = Rule(title=title, characters=characters, style=style, align=align) + self.print(rule) + + def control(self, *control: Control) -> None: + """Insert non-printing control codes. + + Args: + control_codes (str): Control codes, such as those that may move the cursor. + """ + if not self.is_dumb_terminal: + with self: + self._buffer.extend(_control.segment for _control in control) + + def out( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + highlight: Optional[bool] = None, + ) -> None: + """Output to the terminal. This is a low-level way of writing to the terminal which unlike + :meth:`~rich.console.Console.print` won't pretty print, wrap text, or apply markup, but will + optionally apply highlighting and a basic style. + + Args: + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use + console default. Defaults to ``None``. 
+ """ + raw_output: str = sep.join(str(_object) for _object in objects) + self.print( + raw_output, + style=style, + highlight=highlight, + emoji=False, + markup=False, + no_wrap=True, + overflow="ignore", + crop=False, + end=end, + ) + + def print( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + no_wrap: Optional[bool] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + width: Optional[int] = None, + height: Optional[int] = None, + crop: bool = True, + soft_wrap: Optional[bool] = None, + new_line_start: bool = False, + ) -> None: + """Print to the console. + + Args: + objects (positional args): Objects to log to the terminal. + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + justify (str, optional): Justify method: "default", "left", "right", "center", or "full". Defaults to ``None``. + overflow (str, optional): Overflow method: "ignore", "crop", "fold", or "ellipsis". Defaults to None. + no_wrap (Optional[bool], optional): Disable word wrapping. Defaults to None. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to ``None``. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to ``None``. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to ``None``. + width (Optional[int], optional): Width of output, or ``None`` to auto-detect. Defaults to ``None``. + crop (Optional[bool], optional): Crop output to width of terminal. Defaults to True. + soft_wrap (bool, optional): Enable soft wrap mode which disables word wrapping and cropping of text or ``None`` for + Console default. Defaults to ``None``. + new_line_start (bool, False): Insert a new line at the start if the output contains more than one line. Defaults to ``False``. 
+ """ + if not objects: + objects = (NewLine(),) + + if soft_wrap is None: + soft_wrap = self.soft_wrap + if soft_wrap: + if no_wrap is None: + no_wrap = True + if overflow is None: + overflow = "ignore" + crop = False + render_hooks = self._render_hooks[:] + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + render_options = self.options.update( + justify=justify, + overflow=overflow, + width=min(width, self.width) if width is not None else NO_CHANGE, + height=height, + no_wrap=no_wrap, + markup=markup, + highlight=highlight, + ) + + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + if style is None: + for renderable in renderables: + extend(render(renderable, render_options)) + else: + for renderable in renderables: + extend( + Segment.apply_style( + render(renderable, render_options), self.get_style(style) + ) + ) + if new_line_start: + if ( + len("".join(segment.text for segment in new_segments).splitlines()) + > 1 + ): + new_segments.insert(0, Segment.line()) + if crop: + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + else: + self._buffer.extend(new_segments) + + def print_json( + self, + json: Optional[str] = None, + *, + data: Any = None, + indent: Union[None, int, str] = 2, + highlight: bool = True, + skip_keys: bool = False, + ensure_ascii: bool = False, + check_circular: bool = True, + allow_nan: bool = True, + default: Optional[Callable[[Any], Any]] = None, + sort_keys: bool = False, + ) -> None: + """Pretty prints JSON. Output will be valid JSON. + + Args: + json (Optional[str]): A string containing JSON. + data (Any): If json is not supplied, then encode this data. + indent (Union[None, int, str], optional): Number of spaces to indent. Defaults to 2. + highlight (bool, optional): Enable highlighting of output: Defaults to True. + skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False. + ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False. + check_circular (bool, optional): Check for circular references. Defaults to True. + allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True. + default (Callable, optional): A callable that converts values that can not be encoded + in to something that can be JSON encoded. Defaults to None. + sort_keys (bool, optional): Sort dictionary keys. Defaults to False. + """ + from pip._vendor.rich.json import JSON + + if json is None: + json_renderable = JSON.from_data( + data, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + else: + if not isinstance(json, str): + raise TypeError( + f"json must be str. Did you mean print_json(data={json!r}) ?" 
+ ) + json_renderable = JSON( + json, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + self.print(json_renderable, soft_wrap=True) + + def update_screen( + self, + renderable: RenderableType, + *, + region: Optional[Region] = None, + options: Optional[ConsoleOptions] = None, + ) -> None: + """Update the screen at a given offset. + + Args: + renderable (RenderableType): A Rich renderable. + region (Region, optional): Region of screen to update, or None for entire screen. Defaults to None. + options (Optional[ConsoleOptions], optional): Console options, or None to use default options. Defaults to None. + + Raises: + errors.NoAltScreen: If the Console isn't in alt screen mode. + + """ + if not self.is_alt_screen: + raise errors.NoAltScreen("Alt screen must be enabled to call update_screen") + render_options = options or self.options + if region is None: + x = y = 0 + render_options = render_options.update_dimensions( + render_options.max_width, render_options.height or self.height + ) + else: + x, y, width, height = region + render_options = render_options.update_dimensions(width, height) + + lines = self.render_lines(renderable, options=render_options) + self.update_screen_lines(lines, x, y) + + def update_screen_lines( + self, lines: List[List[Segment]], x: int = 0, y: int = 0 + ) -> None: + """Update lines of the screen at a given offset. + + Args: + lines (List[List[Segment]]): Rendered lines (as produced by :meth:`~rich.Console.render_lines`). + x (int, optional): x offset (column no). Defaults to 0. + y (int, optional): y offset (line no). Defaults to 0. + + Raises: + errors.NoAltScreen: If the Console isn't in alt screen mode. + """ + if not self.is_alt_screen: + raise errors.NoAltScreen("Alt screen must be enabled to call update_screen") + screen_update = ScreenUpdate(lines, x, y) + segments = self.render(screen_update) + self._buffer.extend(segments) + self._check_buffer() + + def print_exception( + self, + *, + width: Optional[int] = 100, + extra_lines: int = 3, + theme: Optional[str] = None, + word_wrap: bool = False, + show_locals: bool = False, + suppress: Iterable[Union[str, ModuleType]] = (), + max_frames: int = 100, + ) -> None: + """Prints a rich render of the last exception and traceback. + + Args: + width (Optional[int], optional): Number of characters used to render code. Defaults to 100. + extra_lines (int, optional): Additional lines of code to render. Defaults to 3. + theme (str, optional): Override pygments theme used in traceback. + word_wrap (bool, optional): Enable word wrapping of long lines. Defaults to False. + show_locals (bool, optional): Enable display of local variables. Defaults to False. + suppress (Iterable[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. + max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. + """ + from .traceback import Traceback + + traceback = Traceback( + width=width, + extra_lines=extra_lines, + theme=theme, + word_wrap=word_wrap, + show_locals=show_locals, + suppress=suppress, + max_frames=max_frames, + ) + self.print(traceback) + + @staticmethod + def _caller_frame_info( + offset: int, + currentframe: Callable[[], Optional[FrameType]] = inspect.currentframe, + ) -> Tuple[str, int, Dict[str, Any]]: + """Get caller frame information. + + Args: + offset (int): the caller offset within the current frame stack. 
+ currentframe (Callable[[], Optional[FrameType]], optional): the callable to use to + retrieve the current frame. Defaults to ``inspect.currentframe``. + + Returns: + Tuple[str, int, Dict[str, Any]]: A tuple containing the filename, the line number and + the dictionary of local variables associated with the caller frame. + + Raises: + RuntimeError: If the stack offset is invalid. + """ + # Ignore the frame of this local helper + offset += 1 + + frame = currentframe() + if frame is not None: + # Use the faster currentframe where implemented + while offset and frame is not None: + frame = frame.f_back + offset -= 1 + assert frame is not None + return frame.f_code.co_filename, frame.f_lineno, frame.f_locals + else: + # Fallback to the slower stack + frame_info = inspect.stack()[offset] + return frame_info.filename, frame_info.lineno, frame_info.frame.f_locals + + def log( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + justify: Optional[JustifyMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + log_locals: bool = False, + _stack_offset: int = 1, + ) -> None: + """Log rich content to the terminal. + + Args: + objects (positional args): Objects to log to the terminal. + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. + overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to None. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to None. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to None. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to None. + log_locals (bool, optional): Boolean to enable logging of locals where ``log()`` + was called. Defaults to False. + _stack_offset (int, optional): Offset of caller from end of call stack. Defaults to 1. 
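+ + Example (illustrative; the output also includes the time and caller path when enabled): + >>> console = Console() + >>> console.log("Hello, World!", log_locals=True)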
+ """ + if not objects: + objects = (NewLine(),) + + render_hooks = self._render_hooks[:] + + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + if style is not None: + renderables = [Styled(renderable, style) for renderable in renderables] + + filename, line_no, locals = self._caller_frame_info(_stack_offset) + link_path = None if filename.startswith("<") else os.path.abspath(filename) + path = filename.rpartition(os.sep)[-1] + if log_locals: + locals_map = { + key: value + for key, value in locals.items() + if not key.startswith("__") + } + renderables.append(render_scope(locals_map, title="[i]locals")) + + renderables = [ + self._log_render( + self, + renderables, + log_time=self.get_datetime(), + path=path, + line_no=line_no, + link_path=link_path, + ) + ] + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + render_options = self.options + for renderable in renderables: + extend(render(renderable, render_options)) + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + + def _check_buffer(self) -> None: + """Check if the buffer may be rendered. Render it if it can (e.g. Console.quiet is False) + Rendering is supported on Windows, Unix and Jupyter environments. For + legacy Windows consoles, the win32 API is called directly. + This method will also record what it renders if recording is enabled via Console.record. + """ + if self.quiet: + del self._buffer[:] + return + with self._lock: + if self.record: + with self._record_buffer_lock: + self._record_buffer.extend(self._buffer[:]) + + if self._buffer_index == 0: + if self.is_jupyter: # pragma: no cover + from .jupyter import display + + display(self._buffer, self._render_buffer(self._buffer[:])) + del self._buffer[:] + else: + if WINDOWS: + use_legacy_windows_render = False + if self.legacy_windows: + fileno = get_fileno(self.file) + if fileno is not None: + use_legacy_windows_render = ( + fileno in _STD_STREAMS_OUTPUT + ) + + if use_legacy_windows_render: + from pip._vendor.rich._win32_console import LegacyWindowsTerm + from pip._vendor.rich._windows_renderer import legacy_windows_render + + buffer = self._buffer[:] + if self.no_color and self._color_system: + buffer = list(Segment.remove_color(buffer)) + + legacy_windows_render(buffer, LegacyWindowsTerm(self.file)) + else: + # Either a non-std stream on legacy Windows, or modern Windows. 
+ text = self._render_buffer(self._buffer[:]) + # https://bugs.python.org/issue37871 + # https://github.com/python/cpython/issues/82052 + # We need to avoid writing more than 32Kb in a single write, due to the above bug + write = self.file.write + # Worst-case scenario, every character is 4 bytes of utf-8 + MAX_WRITE = 32 * 1024 // 4 + try: + if len(text) <= MAX_WRITE: + write(text) + else: + batch: List[str] = [] + batch_append = batch.append + size = 0 + for line in text.splitlines(True): + if size + len(line) > MAX_WRITE and batch: + write("".join(batch)) + batch.clear() + size = 0 + batch_append(line) + size += len(line) + if batch: + write("".join(batch)) + batch.clear() + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + else: + text = self._render_buffer(self._buffer[:]) + try: + self.file.write(text) + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + + self.file.flush() + del self._buffer[:] + + def _render_buffer(self, buffer: Iterable[Segment]) -> str: + """Render buffered segments to a single string.""" + output: List[str] = [] + append = output.append + color_system = self._color_system + legacy_windows = self.legacy_windows + not_terminal = not self.is_terminal + if self.no_color and color_system: + buffer = Segment.remove_color(buffer) + for text, style, control in buffer: + if style: + append( + style.render( + text, + color_system=color_system, + legacy_windows=legacy_windows, + ) + ) + elif not (not_terminal and control): + append(text) + + rendered = "".join(output) + return rendered + + def input( + self, + prompt: TextType = "", + *, + markup: bool = True, + emoji: bool = True, + password: bool = False, + stream: Optional[TextIO] = None, + ) -> str: + """Displays a prompt and waits for input from the user. The prompt may contain color / style. + + It works in the same way as Python's builtin :func:`input` function and provides elaborate line editing and history features if Python's builtin :mod:`readline` module is previously loaded. + + Args: + prompt (Union[str, Text]): Text to render in the prompt. + markup (bool, optional): Enable console markup (requires a str prompt). Defaults to True. + emoji (bool, optional): Enable emoji (requires a str prompt). Defaults to True. + password (bool, optional): Hide typed text. Defaults to False. + stream (TextIO, optional): Optional file to read input from (rather than stdin). Defaults to None. + + Returns: + str: Text read from stdin. + """ + if prompt: + self.print(prompt, markup=markup, emoji=emoji, end="") + if password: + result = getpass("", stream=stream) + else: + if stream: + result = stream.readline() + else: + result = input() + return result + + def export_text(self, *, clear: bool = True, styles: bool = False) -> str: + """Generate text from console contents (requires record=True argument in constructor). + + Args: + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi escape codes will be included. ``False`` for plain text. + Defaults to ``False``. + + Returns: + str: String containing console contents. 
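+ + Example (a minimal sketch, assuming a recording console): + >>> console = Console(record=True) + >>> console.print("Hello") + Hello + >>> console.export_text() + 'Hello\n'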
+ + """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + + with self._record_buffer_lock: + if styles: + text = "".join( + (style.render(text) if style else text) + for text, style, _ in self._record_buffer + ) + else: + text = "".join( + segment.text + for segment in self._record_buffer + if not segment.control + ) + if clear: + del self._record_buffer[:] + return text + + def save_text(self, path: str, *, clear: bool = True, styles: bool = False) -> None: + """Generate text from console and save to a given location (requires record=True argument in constructor). + + Args: + path (str): Path to write text files. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi style codes will be included. ``False`` for plain text. + Defaults to ``False``. + + """ + text = self.export_text(clear=clear, styles=styles) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(text) + + def export_html( + self, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: Optional[str] = None, + inline_styles: bool = False, + ) -> str: + """Generate HTML from console contents (requires record=True argument in constructor). + + Args: + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + Returns: + str: String containing console contents as HTML. 
+ """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + fragments: List[str] = [] + append = fragments.append + _theme = theme or DEFAULT_TERMINAL_THEME + stylesheet = "" + + render_code_format = CONSOLE_HTML_FORMAT if code_format is None else code_format + + with self._record_buffer_lock: + if inline_styles: + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + if style.link: + text = f'{text}' + text = f'{text}' if rule else text + append(text) + else: + styles: Dict[str, int] = {} + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + style_number = styles.setdefault(rule, len(styles) + 1) + if style.link: + text = f'{text}' + else: + text = f'{text}' + append(text) + stylesheet_rules: List[str] = [] + stylesheet_append = stylesheet_rules.append + for style_rule, style_number in styles.items(): + if style_rule: + stylesheet_append(f".r{style_number} {{{style_rule}}}") + stylesheet = "\n".join(stylesheet_rules) + + rendered_code = render_code_format.format( + code="".join(fragments), + stylesheet=stylesheet, + foreground=_theme.foreground_color.hex, + background=_theme.background_color.hex, + ) + if clear: + del self._record_buffer[:] + return rendered_code + + def save_html( + self, + path: str, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_HTML_FORMAT, + inline_styles: bool = False, + ) -> None: + """Generate HTML from console contents and write to a file (requires record=True argument in constructor). + + Args: + path (str): Path to write html file. + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + """ + html = self.export_html( + theme=theme, + clear=clear, + code_format=code_format, + inline_styles=inline_styles, + ) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(html) + + def export_svg( + self, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, + ) -> str: + """ + Generate an SVG from the console contents (requires record=True in Console constructor). + + Args: + title (str, optional): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. 
+ font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format`` + string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font). + If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. + """ + + from pip._vendor.rich.cells import cell_len + + style_cache: Dict[Style, str] = {} + + def get_svg_style(style: Style) -> str: + """Convert a Style to CSS rules for SVG.""" + if style in style_cache: + return style_cache[style] + css_rules = [] + color = ( + _theme.foreground_color + if (style.color is None or style.color.is_default) + else style.color.get_truecolor(_theme) + ) + bgcolor = ( + _theme.background_color + if (style.bgcolor is None or style.bgcolor.is_default) + else style.bgcolor.get_truecolor(_theme) + ) + if style.reverse: + color, bgcolor = bgcolor, color + if style.dim: + color = blend_rgb(color, bgcolor, 0.4) + css_rules.append(f"fill: {color.hex}") + if style.bold: + css_rules.append("font-weight: bold") + if style.italic: + css_rules.append("font-style: italic;") + if style.underline: + css_rules.append("text-decoration: underline;") + if style.strike: + css_rules.append("text-decoration: line-through;") + + css = ";".join(css_rules) + style_cache[style] = css + return css + + _theme = theme or SVG_EXPORT_THEME + + width = self.width + char_height = 20 + char_width = char_height * font_aspect_ratio + line_height = char_height * 1.22 + + margin_top = 1 + margin_right = 1 + margin_bottom = 1 + margin_left = 1 + + padding_top = 40 + padding_right = 8 + padding_bottom = 8 + padding_left = 8 + + padding_width = padding_left + padding_right + padding_height = padding_top + padding_bottom + margin_width = margin_left + margin_right + margin_height = margin_top + margin_bottom + + text_backgrounds: List[str] = [] + text_group: List[str] = [] + classes: Dict[str, int] = {} + style_no = 1 + + def escape_text(text: str) -> str: + """HTML escape text and replace spaces with nbsp.""" + return escape(text).replace(" ", " ") + + def make_tag( + name: str, content: Optional[str] = None, **attribs: object + ) -> str: + """Make a tag from name, content, and attributes.""" + + def stringify(value: object) -> str: + if isinstance(value, (float)): + return format(value, "g") + return str(value) + + tag_attribs = " ".join( + f'{k.lstrip("_").replace("_", "-")}="{stringify(v)}"' + for k, v in attribs.items() + ) + return ( + f"<{name} {tag_attribs}>{content}" + if content + else f"<{name} {tag_attribs}/>" + ) + + with self._record_buffer_lock: + segments = list(Segment.filter_control(self._record_buffer)) + if clear: + self._record_buffer.clear() + + if unique_id is None: + unique_id = "terminal-" + str( + zlib.adler32( + ("".join(repr(segment) for segment in segments)).encode( + "utf-8", + "ignore", + ) + + title.encode("utf-8", "ignore") + ) + ) + y = 0 + for y, line in enumerate(Segment.split_and_crop_lines(segments, length=width)): + x = 0 + for text, style, _control in line: + style = style or Style() + rules = get_svg_style(style) + if rules not in classes: + classes[rules] = style_no + style_no += 1 + class_name = f"r{classes[rules]}" + + if style.reverse: + has_background = True + background = ( + _theme.foreground_color.hex + if style.color is None + else style.color.get_truecolor(_theme).hex 
+                    )
+                else:
+                    bgcolor = style.bgcolor
+                    has_background = bgcolor is not None and not bgcolor.is_default
+                    background = (
+                        _theme.background_color.hex
+                        if style.bgcolor is None
+                        else style.bgcolor.get_truecolor(_theme).hex
+                    )
+
+                text_length = cell_len(text)
+                if has_background:
+                    text_backgrounds.append(
+                        make_tag(
+                            "rect",
+                            fill=background,
+                            x=x * char_width,
+                            y=y * line_height + 1.5,
+                            width=char_width * text_length,
+                            height=line_height + 0.25,
+                            shape_rendering="crispEdges",
+                        )
+                    )
+
+                if text != " " * len(text):
+                    text_group.append(
+                        make_tag(
+                            "text",
+                            escape_text(text),
+                            _class=f"{unique_id}-{class_name}",
+                            x=x * char_width,
+                            y=y * line_height + char_height,
+                            textLength=char_width * len(text),
+                            clip_path=f"url(#{unique_id}-line-{y})",
+                        )
+                    )
+                x += cell_len(text)
+
+        line_offsets = [line_no * line_height + 1.5 for line_no in range(y)]
+        lines = "\n".join(
+            f"""<clipPath id="{unique_id}-line-{line_no}">
+    {make_tag("rect", x=0, y=offset, width=char_width * width, height=line_height + 0.25)}
+            </clipPath>"""
+            for line_no, offset in enumerate(line_offsets)
+        )
+
+        styles = "\n".join(
+            f".{unique_id}-r{rule_no} {{ {css} }}" for css, rule_no in classes.items()
+        )
+        backgrounds = "".join(text_backgrounds)
+        matrix = "".join(text_group)
+
+        terminal_width = ceil(width * char_width + padding_width)
+        terminal_height = (y + 1) * line_height + padding_height
+        chrome = make_tag(
+            "rect",
+            fill=_theme.background_color.hex,
+            stroke="rgba(255,255,255,0.35)",
+            stroke_width="1",
+            x=margin_left,
+            y=margin_top,
+            width=terminal_width,
+            height=terminal_height,
+            rx=8,
+        )
+
+        title_color = _theme.foreground_color.hex
+        if title:
+            chrome += make_tag(
+                "text",
+                escape_text(title),
+                _class=f"{unique_id}-title",
+                fill=title_color,
+                text_anchor="middle",
+                x=terminal_width // 2,
+                y=margin_top + char_height + 6,
+            )
+        chrome += f"""
+            <g transform="translate(26,22)">
+            <circle cx="0" cy="0" r="7" fill="#ff5f57"/>
+            <circle cx="22" cy="0" r="7" fill="#febc2e"/>
+            <circle cx="44" cy="0" r="7" fill="#28c840"/>
+            </g>
+        """
+
+        svg = code_format.format(
+            unique_id=unique_id,
+            char_width=char_width,
+            char_height=char_height,
+            line_height=line_height,
+            terminal_width=char_width * width - 1,
+            terminal_height=(y + 1) * line_height - 1,
+            width=terminal_width + margin_width,
+            height=terminal_height + margin_height,
+            terminal_x=margin_left + padding_left,
+            terminal_y=margin_top + padding_top,
+            styles=styles,
+            chrome=chrome,
+            backgrounds=backgrounds,
+            matrix=matrix,
+            lines=lines,
+        )
+        return svg
+
+    def save_svg(
+        self,
+        path: str,
+        *,
+        title: str = "Rich",
+        theme: Optional[TerminalTheme] = None,
+        clear: bool = True,
+        code_format: str = CONSOLE_SVG_FORMAT,
+        font_aspect_ratio: float = 0.61,
+        unique_id: Optional[str] = None,
+    ) -> None:
+        """Generate an SVG file from the console contents (requires record=True in Console constructor).
+
+        Args:
+            path (str): The path to write the SVG to.
+            title (str, optional): The title of the tab in the output image
+            theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal
+            clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``
+            code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables
+                into the string in order to form the final SVG output. The default template used and the variables
+                injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable.
+            font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format``
+                string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font).
+ If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. + """ + svg = self.export_svg( + title=title, + theme=theme, + clear=clear, + code_format=code_format, + font_aspect_ratio=font_aspect_ratio, + unique_id=unique_id, + ) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(svg) + + +def _svg_hash(svg_main_code: str) -> str: + """Returns a unique hash for the given SVG main code. + + Args: + svg_main_code (str): The content we're going to inject in the SVG envelope. + + Returns: + str: a hash of the given content + """ + return str(zlib.adler32(svg_main_code.encode())) + + +if __name__ == "__main__": # pragma: no cover + console = Console(record=True) + + console.log( + "JSONRPC [i]request[/i]", + 5, + 1.3, + True, + False, + None, + { + "jsonrpc": "2.0", + "method": "subtract", + "params": {"minuend": 42, "subtrahend": 23}, + "id": 3, + }, + ) + + console.log("Hello, World!", "{'a': 1}", repr(console)) + + console.print( + { + "name": None, + "empty": [], + "quiz": { + "sport": { + "answered": True, + "q1": { + "question": "Which one is correct team name in NBA?", + "options": [ + "New York Bulls", + "Los Angeles Kings", + "Golden State Warriors", + "Huston Rocket", + ], + "answer": "Huston Rocket", + }, + }, + "maths": { + "answered": False, + "q1": { + "question": "5 + 7 = ?", + "options": [10, 11, 12, 13], + "answer": 12, + }, + "q2": { + "question": "12 - 8 = ?", + "options": [1, 2, 3, 4], + "answer": 4, + }, + }, + }, + } + ) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py new file mode 100644 index 0000000..65fdf56 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py @@ -0,0 +1,37 @@ +from typing import Optional, TYPE_CHECKING + +from .jupyter import JupyterMixin +from .measure import Measurement + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderableType, RenderResult + + +class Constrain(JupyterMixin): + """Constrain the width of a renderable to a given number of characters. + + Args: + renderable (RenderableType): A renderable object. + width (int, optional): The maximum width (in characters) to render. Defaults to 80. 
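+
+    Example:
+        A minimal sketch of typical usage (the string renderable and width are illustrative)::
+
+            console.print(Constrain("Hello, World!", width=20))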
+ """ + + def __init__(self, renderable: "RenderableType", width: Optional[int] = 80) -> None: + self.renderable = renderable + self.width = width + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.width is None: + yield self.renderable + else: + child_options = options.update_width(min(self.width, options.max_width)) + yield from console.render(self.renderable, child_options) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.width is not None: + options = options.update_width(self.width) + measurement = Measurement.get(console, options, self.renderable) + return measurement diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py new file mode 100644 index 0000000..e29cf36 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py @@ -0,0 +1,167 @@ +from itertools import zip_longest +from typing import ( + Iterator, + Iterable, + List, + Optional, + Union, + overload, + TypeVar, + TYPE_CHECKING, +) + +if TYPE_CHECKING: + from .console import ( + Console, + ConsoleOptions, + JustifyMethod, + OverflowMethod, + RenderResult, + RenderableType, + ) + from .text import Text + +from .cells import cell_len +from .measure import Measurement + +T = TypeVar("T") + + +class Renderables: + """A list subclass which renders its contents to the console.""" + + def __init__( + self, renderables: Optional[Iterable["RenderableType"]] = None + ) -> None: + self._renderables: List["RenderableType"] = ( + list(renderables) if renderables is not None else [] + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._renderables + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + dimensions = [ + Measurement.get(console, options, renderable) + for renderable in self._renderables + ] + if not dimensions: + return Measurement(1, 1) + _min = max(dimension.minimum for dimension in dimensions) + _max = max(dimension.maximum for dimension in dimensions) + return Measurement(_min, _max) + + def append(self, renderable: "RenderableType") -> None: + self._renderables.append(renderable) + + def __iter__(self) -> Iterable["RenderableType"]: + return iter(self._renderables) + + +class Lines: + """A list subclass which can render to the console.""" + + def __init__(self, lines: Iterable["Text"] = ()) -> None: + self._lines: List["Text"] = list(lines) + + def __repr__(self) -> str: + return f"Lines({self._lines!r})" + + def __iter__(self) -> Iterator["Text"]: + return iter(self._lines) + + @overload + def __getitem__(self, index: int) -> "Text": + ... + + @overload + def __getitem__(self, index: slice) -> List["Text"]: + ... 
+ + def __getitem__(self, index: Union[slice, int]) -> Union["Text", List["Text"]]: + return self._lines[index] + + def __setitem__(self, index: int, value: "Text") -> "Lines": + self._lines[index] = value + return self + + def __len__(self) -> int: + return self._lines.__len__() + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._lines + + def append(self, line: "Text") -> None: + self._lines.append(line) + + def extend(self, lines: Iterable["Text"]) -> None: + self._lines.extend(lines) + + def pop(self, index: int = -1) -> "Text": + return self._lines.pop(index) + + def justify( + self, + console: "Console", + width: int, + justify: "JustifyMethod" = "left", + overflow: "OverflowMethod" = "fold", + ) -> None: + """Justify and overflow text to a given width. + + Args: + console (Console): Console instance. + width (int): Number of characters per line. + justify (str, optional): Default justify method for text: "left", "center", "full" or "right". Defaults to "left". + overflow (str, optional): Default overflow for text: "crop", "fold", or "ellipsis". Defaults to "fold". + + """ + from .text import Text + + if justify == "left": + for line in self._lines: + line.truncate(width, overflow=overflow, pad=True) + elif justify == "center": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left((width - cell_len(line.plain)) // 2) + line.pad_right(width - cell_len(line.plain)) + elif justify == "right": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left(width - cell_len(line.plain)) + elif justify == "full": + for line_index, line in enumerate(self._lines): + if line_index == len(self._lines) - 1: + break + words = line.split(" ") + words_size = sum(cell_len(word.plain) for word in words) + num_spaces = len(words) - 1 + spaces = [1 for _ in range(num_spaces)] + index = 0 + if spaces: + while words_size + num_spaces < width: + spaces[len(spaces) - index - 1] += 1 + num_spaces += 1 + index = (index + 1) % len(spaces) + tokens: List[Text] = [] + for index, (word, next_word) in enumerate( + zip_longest(words, words[1:]) + ): + tokens.append(word) + if index < len(spaces): + style = word.get_style_at_offset(console, -1) + next_style = next_word.get_style_at_offset(console, 0) + space_style = style if style == next_style else line.style + tokens.append(Text(" " * spaces[index], style=space_style)) + self[line_index] = Text("").join(tokens) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/control.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/control.py new file mode 100644 index 0000000..88fcb92 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/control.py @@ -0,0 +1,225 @@ +import sys +import time +from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Union + +if sys.version_info >= (3, 8): + from typing import Final +else: + from pip._vendor.typing_extensions import Final # pragma: no cover + +from .segment import ControlCode, ControlType, Segment + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + +STRIP_CONTROL_CODES: Final = [ + 7, # Bell + 8, # Backspace + 11, # Vertical tab + 12, # Form feed + 13, # Carriage return +] +_CONTROL_STRIP_TRANSLATE: Final = { + _codepoint: None for _codepoint in STRIP_CONTROL_CODES +} + +CONTROL_ESCAPE: Final = { + 7: "\\a", + 8: "\\b", + 11: "\\v", + 12: "\\f", 
+    13: "\\r",
+}
+
+CONTROL_CODES_FORMAT: Dict[int, Callable[..., str]] = {
+    ControlType.BELL: lambda: "\x07",
+    ControlType.CARRIAGE_RETURN: lambda: "\r",
+    ControlType.HOME: lambda: "\x1b[H",
+    ControlType.CLEAR: lambda: "\x1b[2J",
+    ControlType.ENABLE_ALT_SCREEN: lambda: "\x1b[?1049h",
+    ControlType.DISABLE_ALT_SCREEN: lambda: "\x1b[?1049l",
+    ControlType.SHOW_CURSOR: lambda: "\x1b[?25h",
+    ControlType.HIDE_CURSOR: lambda: "\x1b[?25l",
+    ControlType.CURSOR_UP: lambda param: f"\x1b[{param}A",
+    ControlType.CURSOR_DOWN: lambda param: f"\x1b[{param}B",
+    ControlType.CURSOR_FORWARD: lambda param: f"\x1b[{param}C",
+    ControlType.CURSOR_BACKWARD: lambda param: f"\x1b[{param}D",
+    ControlType.CURSOR_MOVE_TO_COLUMN: lambda param: f"\x1b[{param+1}G",
+    ControlType.ERASE_IN_LINE: lambda param: f"\x1b[{param}K",
+    ControlType.CURSOR_MOVE_TO: lambda x, y: f"\x1b[{y+1};{x+1}H",
+    ControlType.SET_WINDOW_TITLE: lambda title: f"\x1b]0;{title}\x07",
+}
+
+
+class Control:
+    """A renderable that inserts a control code (non printable but may move cursor).
+
+    Args:
+        *codes (str): Positional arguments are either a :class:`~rich.segment.ControlType` enum or a
+            tuple of ControlType and an integer parameter
+    """
+
+    __slots__ = ["segment"]
+
+    def __init__(self, *codes: Union[ControlType, ControlCode]) -> None:
+        control_codes: List[ControlCode] = [
+            (code,) if isinstance(code, ControlType) else code for code in codes
+        ]
+        _format_map = CONTROL_CODES_FORMAT
+        rendered_codes = "".join(
+            _format_map[code](*parameters) for code, *parameters in control_codes
+        )
+        self.segment = Segment(rendered_codes, None, control_codes)
+
+    @classmethod
+    def bell(cls) -> "Control":
+        """Ring the 'bell'."""
+        return cls(ControlType.BELL)
+
+    @classmethod
+    def home(cls) -> "Control":
+        """Move cursor to 'home' position."""
+        return cls(ControlType.HOME)
+
+    @classmethod
+    def move(cls, x: int = 0, y: int = 0) -> "Control":
+        """Move cursor relative to current position.
+
+        Args:
+            x (int): X offset.
+            y (int): Y offset.
+
+        Returns:
+            ~Control: Control object.
+
+        """
+
+        def get_codes() -> Iterable[ControlCode]:
+            control = ControlType
+            if x:
+                yield (
+                    control.CURSOR_FORWARD if x > 0 else control.CURSOR_BACKWARD,
+                    abs(x),
+                )
+            if y:
+                yield (
+                    control.CURSOR_DOWN if y > 0 else control.CURSOR_UP,
+                    abs(y),
+                )
+
+        control = cls(*get_codes())
+        return control
+
+    @classmethod
+    def move_to_column(cls, x: int, y: int = 0) -> "Control":
+        """Move to the given column, optionally add offset to row.
+
+        Args:
+            x (int): absolute x (column)
+            y (int): optional y offset (row)
+
+        Returns:
+            ~Control: Control object.
+        """
+
+        return (
+            cls(
+                (ControlType.CURSOR_MOVE_TO_COLUMN, x),
+                (
+                    ControlType.CURSOR_DOWN if y > 0 else ControlType.CURSOR_UP,
+                    abs(y),
+                ),
+            )
+            if y
+            else cls((ControlType.CURSOR_MOVE_TO_COLUMN, x))
+        )
+
+    @classmethod
+    def move_to(cls, x: int, y: int) -> "Control":
+        """Move cursor to absolute position.
+
+        Args:
+            x (int): x offset (column)
+            y (int): y offset (row)
+
+        Returns:
+            ~Control: Control object.
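+
+        Example:
+            A minimal sketch of intended usage (coordinates are illustrative)::
+
+                console.control(Control.move_to(0, 0))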
+ """ + return cls((ControlType.CURSOR_MOVE_TO, x, y)) + + @classmethod + def clear(cls) -> "Control": + """Clear the screen.""" + return cls(ControlType.CLEAR) + + @classmethod + def show_cursor(cls, show: bool) -> "Control": + """Show or hide the cursor.""" + return cls(ControlType.SHOW_CURSOR if show else ControlType.HIDE_CURSOR) + + @classmethod + def alt_screen(cls, enable: bool) -> "Control": + """Enable or disable alt screen.""" + if enable: + return cls(ControlType.ENABLE_ALT_SCREEN, ControlType.HOME) + else: + return cls(ControlType.DISABLE_ALT_SCREEN) + + @classmethod + def title(cls, title: str) -> "Control": + """Set the terminal window title + + Args: + title (str): The new terminal window title + """ + return cls((ControlType.SET_WINDOW_TITLE, title)) + + def __str__(self) -> str: + return self.segment.text + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.segment.text: + yield self.segment + + +def strip_control_codes( + text: str, _translate_table: Dict[int, None] = _CONTROL_STRIP_TRANSLATE +) -> str: + """Remove control codes from text. + + Args: + text (str): A string possibly contain control codes. + + Returns: + str: String with control codes removed. + """ + return text.translate(_translate_table) + + +def escape_control_codes( + text: str, + _translate_table: Dict[int, str] = CONTROL_ESCAPE, +) -> str: + """Replace control codes with their "escaped" equivalent in the given text. + (e.g. "\b" becomes "\\b") + + Args: + text (str): A string possibly containing control codes. + + Returns: + str: String with control codes replaced with their escaped version. + """ + return text.translate(_translate_table) + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + console = Console() + console.print("Look at the title of your terminal window ^") + # console.print(Control((ControlType.SET_WINDOW_TITLE, "Hello, world!"))) + for i in range(10): + console.set_window_title("🚀 Loading" + "." 
* i) + time.sleep(0.5) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py new file mode 100644 index 0000000..dca3719 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py @@ -0,0 +1,190 @@ +from typing import Dict + +from .style import Style + +DEFAULT_STYLES: Dict[str, Style] = { + "none": Style.null(), + "reset": Style( + color="default", + bgcolor="default", + dim=False, + bold=False, + italic=False, + underline=False, + blink=False, + blink2=False, + reverse=False, + conceal=False, + strike=False, + ), + "dim": Style(dim=True), + "bright": Style(dim=False), + "bold": Style(bold=True), + "strong": Style(bold=True), + "code": Style(reverse=True, bold=True), + "italic": Style(italic=True), + "emphasize": Style(italic=True), + "underline": Style(underline=True), + "blink": Style(blink=True), + "blink2": Style(blink2=True), + "reverse": Style(reverse=True), + "strike": Style(strike=True), + "black": Style(color="black"), + "red": Style(color="red"), + "green": Style(color="green"), + "yellow": Style(color="yellow"), + "magenta": Style(color="magenta"), + "cyan": Style(color="cyan"), + "white": Style(color="white"), + "inspect.attr": Style(color="yellow", italic=True), + "inspect.attr.dunder": Style(color="yellow", italic=True, dim=True), + "inspect.callable": Style(bold=True, color="red"), + "inspect.async_def": Style(italic=True, color="bright_cyan"), + "inspect.def": Style(italic=True, color="bright_cyan"), + "inspect.class": Style(italic=True, color="bright_cyan"), + "inspect.error": Style(bold=True, color="red"), + "inspect.equals": Style(), + "inspect.help": Style(color="cyan"), + "inspect.doc": Style(dim=True), + "inspect.value.border": Style(color="green"), + "live.ellipsis": Style(bold=True, color="red"), + "layout.tree.row": Style(dim=False, color="red"), + "layout.tree.column": Style(dim=False, color="blue"), + "logging.keyword": Style(bold=True, color="yellow"), + "logging.level.notset": Style(dim=True), + "logging.level.debug": Style(color="green"), + "logging.level.info": Style(color="blue"), + "logging.level.warning": Style(color="red"), + "logging.level.error": Style(color="red", bold=True), + "logging.level.critical": Style(color="red", bold=True, reverse=True), + "log.level": Style.null(), + "log.time": Style(color="cyan", dim=True), + "log.message": Style.null(), + "log.path": Style(dim=True), + "repr.ellipsis": Style(color="yellow"), + "repr.indent": Style(color="green", dim=True), + "repr.error": Style(color="red", bold=True), + "repr.str": Style(color="green", italic=False, bold=False), + "repr.brace": Style(bold=True), + "repr.comma": Style(bold=True), + "repr.ipv4": Style(bold=True, color="bright_green"), + "repr.ipv6": Style(bold=True, color="bright_green"), + "repr.eui48": Style(bold=True, color="bright_green"), + "repr.eui64": Style(bold=True, color="bright_green"), + "repr.tag_start": Style(bold=True), + "repr.tag_name": Style(color="bright_magenta", bold=True), + "repr.tag_contents": Style(color="default"), + "repr.tag_end": Style(bold=True), + "repr.attrib_name": Style(color="yellow", italic=False), + "repr.attrib_equal": Style(bold=True), + "repr.attrib_value": Style(color="magenta", italic=False), + "repr.number": Style(color="cyan", bold=True, italic=False), + "repr.number_complex": Style(color="cyan", bold=True, italic=False), # same + "repr.bool_true": Style(color="bright_green", italic=True), + "repr.bool_false": 
Style(color="bright_red", italic=True), + "repr.none": Style(color="magenta", italic=True), + "repr.url": Style(underline=True, color="bright_blue", italic=False, bold=False), + "repr.uuid": Style(color="bright_yellow", bold=False), + "repr.call": Style(color="magenta", bold=True), + "repr.path": Style(color="magenta"), + "repr.filename": Style(color="bright_magenta"), + "rule.line": Style(color="bright_green"), + "rule.text": Style.null(), + "json.brace": Style(bold=True), + "json.bool_true": Style(color="bright_green", italic=True), + "json.bool_false": Style(color="bright_red", italic=True), + "json.null": Style(color="magenta", italic=True), + "json.number": Style(color="cyan", bold=True, italic=False), + "json.str": Style(color="green", italic=False, bold=False), + "json.key": Style(color="blue", bold=True), + "prompt": Style.null(), + "prompt.choices": Style(color="magenta", bold=True), + "prompt.default": Style(color="cyan", bold=True), + "prompt.invalid": Style(color="red"), + "prompt.invalid.choice": Style(color="red"), + "pretty": Style.null(), + "scope.border": Style(color="blue"), + "scope.key": Style(color="yellow", italic=True), + "scope.key.special": Style(color="yellow", italic=True, dim=True), + "scope.equals": Style(color="red"), + "table.header": Style(bold=True), + "table.footer": Style(bold=True), + "table.cell": Style.null(), + "table.title": Style(italic=True), + "table.caption": Style(italic=True, dim=True), + "traceback.error": Style(color="red", italic=True), + "traceback.border.syntax_error": Style(color="bright_red"), + "traceback.border": Style(color="red"), + "traceback.text": Style.null(), + "traceback.title": Style(color="red", bold=True), + "traceback.exc_type": Style(color="bright_red", bold=True), + "traceback.exc_value": Style.null(), + "traceback.offset": Style(color="bright_red", bold=True), + "bar.back": Style(color="grey23"), + "bar.complete": Style(color="rgb(249,38,114)"), + "bar.finished": Style(color="rgb(114,156,31)"), + "bar.pulse": Style(color="rgb(249,38,114)"), + "progress.description": Style.null(), + "progress.filesize": Style(color="green"), + "progress.filesize.total": Style(color="green"), + "progress.download": Style(color="green"), + "progress.elapsed": Style(color="yellow"), + "progress.percentage": Style(color="magenta"), + "progress.remaining": Style(color="cyan"), + "progress.data.speed": Style(color="red"), + "progress.spinner": Style(color="green"), + "status.spinner": Style(color="green"), + "tree": Style(), + "tree.line": Style(), + "markdown.paragraph": Style(), + "markdown.text": Style(), + "markdown.em": Style(italic=True), + "markdown.emph": Style(italic=True), # For commonmark backwards compatibility + "markdown.strong": Style(bold=True), + "markdown.code": Style(bold=True, color="cyan", bgcolor="black"), + "markdown.code_block": Style(color="cyan", bgcolor="black"), + "markdown.block_quote": Style(color="magenta"), + "markdown.list": Style(color="cyan"), + "markdown.item": Style(), + "markdown.item.bullet": Style(color="yellow", bold=True), + "markdown.item.number": Style(color="yellow", bold=True), + "markdown.hr": Style(color="yellow"), + "markdown.h1.border": Style(), + "markdown.h1": Style(bold=True), + "markdown.h2": Style(bold=True, underline=True), + "markdown.h3": Style(bold=True), + "markdown.h4": Style(bold=True, dim=True), + "markdown.h5": Style(underline=True), + "markdown.h6": Style(italic=True), + "markdown.h7": Style(italic=True, dim=True), + "markdown.link": Style(color="bright_blue"), + 
"markdown.link_url": Style(color="blue", underline=True), + "markdown.s": Style(strike=True), + "iso8601.date": Style(color="blue"), + "iso8601.time": Style(color="magenta"), + "iso8601.timezone": Style(color="yellow"), +} + + +if __name__ == "__main__": # pragma: no cover + import argparse + import io + + from pip._vendor.rich.console import Console + from pip._vendor.rich.table import Table + from pip._vendor.rich.text import Text + + parser = argparse.ArgumentParser() + parser.add_argument("--html", action="store_true", help="Export as HTML table") + args = parser.parse_args() + html: bool = args.html + console = Console(record=True, width=70, file=io.StringIO()) if html else Console() + + table = Table("Name", "Styling") + + for style_name, style in DEFAULT_STYLES.items(): + table.add_row(Text(style_name, style=style), str(style)) + + console.print(table) + if html: + print(console.export_html(inline_styles=True)) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py new file mode 100644 index 0000000..ad36183 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py @@ -0,0 +1,37 @@ +import os +import platform + +from pip._vendor.rich import inspect +from pip._vendor.rich.console import Console, get_windows_console_features +from pip._vendor.rich.panel import Panel +from pip._vendor.rich.pretty import Pretty + + +def report() -> None: # pragma: no cover + """Print a report to the terminal with debugging information""" + console = Console() + inspect(console) + features = get_windows_console_features() + inspect(features) + + env_names = ( + "TERM", + "COLORTERM", + "CLICOLOR", + "NO_COLOR", + "TERM_PROGRAM", + "COLUMNS", + "LINES", + "JUPYTER_COLUMNS", + "JUPYTER_LINES", + "JPY_PARENT_PID", + "VSCODE_VERBOSE_LOGGING", + ) + env = {name: os.getenv(name) for name in env_names} + console.print(Panel.fit((Pretty(env)), title="[b]Environment Variables")) + + console.print(f'platform="{platform.system()}"') + + +if __name__ == "__main__": # pragma: no cover + report() diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py new file mode 100644 index 0000000..791f046 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py @@ -0,0 +1,96 @@ +import sys +from typing import TYPE_CHECKING, Optional, Union + +from .jupyter import JupyterMixin +from .segment import Segment +from .style import Style +from ._emoji_codes import EMOJI +from ._emoji_replace import _emoji_replace + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pip._vendor.typing_extensions import Literal # pragma: no cover + + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + + +EmojiVariant = Literal["emoji", "text"] + + +class NoEmoji(Exception): + """No emoji by that name.""" + + +class Emoji(JupyterMixin): + __slots__ = ["name", "style", "_char", "variant"] + + VARIANTS = {"text": "\uFE0E", "emoji": "\uFE0F"} + + def __init__( + self, + name: str, + style: Union[str, Style] = "none", + variant: Optional[EmojiVariant] = None, + ) -> None: + """A single emoji character. + + Args: + name (str): Name of emoji. + style (Union[str, Style], optional): Optional style. Defaults to None. + + Raises: + NoEmoji: If the emoji doesn't exist. 
+ """ + self.name = name + self.style = style + self.variant = variant + try: + self._char = EMOJI[name] + except KeyError: + raise NoEmoji(f"No emoji called {name!r}") + if variant is not None: + self._char += self.VARIANTS.get(variant, "") + + @classmethod + def replace(cls, text: str) -> str: + """Replace emoji markup with corresponding unicode characters. + + Args: + text (str): A string with emojis codes, e.g. "Hello :smiley:!" + + Returns: + str: A string with emoji codes replaces with actual emoji. + """ + return _emoji_replace(text) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return self._char + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + yield Segment(self._char, console.get_style(self.style)) + + +if __name__ == "__main__": # pragma: no cover + import sys + + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.console import Console + + console = Console(record=True) + + columns = Columns( + (f":{name}: {name}" for name in sorted(EMOJI.keys()) if "\u200D" not in name), + column_first=True, + ) + + console.print(columns) + if len(sys.argv) > 1: + console.save_html(sys.argv[1]) diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py new file mode 100644 index 0000000..0bcbe53 --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py @@ -0,0 +1,34 @@ +class ConsoleError(Exception): + """An error in console operation.""" + + +class StyleError(Exception): + """An error in styles.""" + + +class StyleSyntaxError(ConsoleError): + """Style was badly formatted.""" + + +class MissingStyle(StyleError): + """No such style.""" + + +class StyleStackError(ConsoleError): + """Style stack is invalid.""" + + +class NotRenderableError(ConsoleError): + """Object is not renderable.""" + + +class MarkupError(ConsoleError): + """Markup was badly formatted.""" + + +class LiveError(ConsoleError): + """Error related to Live display.""" + + +class NoAltScreen(ConsoleError): + """Alt screen mode was required.""" diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py new file mode 100644 index 0000000..4b0b0da --- /dev/null +++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py @@ -0,0 +1,57 @@ +import io +from typing import IO, TYPE_CHECKING, Any, List + +from .ansi import AnsiDecoder +from .text import Text + +if TYPE_CHECKING: + from .console import Console + + +class FileProxy(io.TextIOBase): + """Wraps a file (e.g. 
+
+    def __init__(self, console: "Console", file: IO[str]) -> None:
+        self.__console = console
+        self.__file = file
+        self.__buffer: List[str] = []
+        self.__ansi_decoder = AnsiDecoder()
+
+    @property
+    def rich_proxied_file(self) -> IO[str]:
+        """Get proxied file."""
+        return self.__file
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self.__file, name)
+
+    def write(self, text: str) -> int:
+        if not isinstance(text, str):
+            raise TypeError(f"write() argument must be str, not {type(text).__name__}")
+        buffer = self.__buffer
+        lines: List[str] = []
+        while text:
+            line, new_line, text = text.partition("\n")
+            if new_line:
+                lines.append("".join(buffer) + line)
+                buffer.clear()
+            else:
+                buffer.append(line)
+                break
+        if lines:
+            console = self.__console
+            with console:
+                output = Text("\n").join(
+                    self.__ansi_decoder.decode_line(line) for line in lines
+                )
+                console.print(output)
+        return len(text)
+
+    def flush(self) -> None:
+        output = "".join(self.__buffer)
+        if output:
+            self.__console.print(output)
+        del self.__buffer[:]
+
+    def fileno(self) -> int:
+        return self.__file.fileno()
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py
new file mode 100644
index 0000000..99f118e
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+"""Functions for reporting filesizes. Borrowed from https://github.com/PyFilesystem/pyfilesystem2
+
+The functions declared in this module should cover the different
+use cases needed to generate a string representation of a file size
+using several different units. Since there are many standards regarding
+file size units, three different functions have been implemented.
+
+See Also:
+    * `Wikipedia: Binary prefix <https://en.wikipedia.org/wiki/Binary_prefix>`_
+
+"""
+
+__all__ = ["decimal"]
+
+from typing import Iterable, List, Optional, Tuple
+
+
+def _to_str(
+    size: int,
+    suffixes: Iterable[str],
+    base: int,
+    *,
+    precision: Optional[int] = 1,
+    separator: Optional[str] = " ",
+) -> str:
+    if size == 1:
+        return "1 byte"
+    elif size < base:
+        return "{:,} bytes".format(size)
+
+    for i, suffix in enumerate(suffixes, 2):  # noqa: B007
+        unit = base**i
+        if size < unit:
+            break
+    return "{:,.{precision}f}{separator}{}".format(
+        (base * size / unit),
+        suffix,
+        precision=precision,
+        separator=separator,
+    )
+
+
+def pick_unit_and_suffix(size: int, suffixes: List[str], base: int) -> Tuple[int, str]:
+    """Pick a suffix and base for the given size."""
+    for i, suffix in enumerate(suffixes):
+        unit = base**i
+        if size < unit * base:
+            break
+    return unit, suffix
+
+
+def decimal(
+    size: int,
+    *,
+    precision: Optional[int] = 1,
+    separator: Optional[str] = " ",
+) -> str:
+    """Convert a filesize into a string (powers of 1000, SI prefixes).
+
+    In this convention, ``1000 B = 1 kB``.
+
+    This is typically the format used to advertise the storage
+    capacity of USB flash drives and the like (*256 MB* meaning
+    actually a storage capacity of more than *256 000 000 B*),
+    or used by **Mac OS X** since v10.6 to report file sizes.
+
+    Arguments:
+        size (int): A file size.
+        precision (int): The number of decimal places to include (default = 1).
+        separator (str): The string to separate the value from the units (default = " ").
+
+    Returns:
+        `str`: A string containing an abbreviated file size and units.
+
+    Example:
+        >>> filesize.decimal(30000)
+        '30.0 kB'
+        >>> filesize.decimal(30000, precision=2, separator="")
+        '30.00kB'
+
+    """
+    return _to_str(
+        size,
+        ("kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"),
+        1000,
+        precision=precision,
+        separator=separator,
+    )
diff --git a/myenv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
new file mode 100644
index 0000000..c264679
--- /dev/null
+++ b/myenv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
@@ -0,0 +1,232 @@
+import re
+from abc import ABC, abstractmethod
+from typing import List, Union
+
+from .text import Span, Text
+
+
+def _combine_regex(*regexes: str) -> str:
+    """Combine a number of regexes into a single regex.
+
+    Returns:
+        str: New regex with all regexes ORed together.
+    """
+    return "|".join(regexes)
+
+
+class Highlighter(ABC):
+    """Abstract base class for highlighters."""
+
+    def __call__(self, text: Union[str, Text]) -> Text:
+        """Highlight a str or Text instance.
+
+        Args:
+            text (Union[str, ~Text]): Text to highlight.
+
+        Raises:
+            TypeError: If not called with text or str.
+
+        Returns:
+            Text: A text instance with highlighting applied.
+        """
+        if isinstance(text, str):
+            highlight_text = Text(text)
+        elif isinstance(text, Text):
+            highlight_text = text.copy()
+        else:
+            raise TypeError(f"str or Text instance required, not {text!r}")
+        self.highlight(highlight_text)
+        return highlight_text
+
+    @abstractmethod
+    def highlight(self, text: Text) -> None:
+        """Apply highlighting in place to text.
+
+        Args:
+            text (~Text): A text object to highlight.
+        """
+
+
+class NullHighlighter(Highlighter):
+    """A highlighter object that doesn't highlight.
+
+    May be used to disable highlighting entirely.
+
+    """
+
+    def highlight(self, text: Text) -> None:
+        """Nothing to do"""
+
+
+class RegexHighlighter(Highlighter):
+    """Applies highlighting from a list of regular expressions."""
+
+    highlights: List[str] = []
+    base_style: str = ""
+
+    def highlight(self, text: Text) -> None:
+        """Highlight :class:`rich.text.Text` using regular expressions.
+
+        Args:
+            text (~Text): Text to highlight.
+
+        """
+
+        highlight_regex = text.highlight_regex
+        for re_highlight in self.highlights:
+            highlight_regex(re_highlight, style_prefix=self.base_style)
+
+
+class ReprHighlighter(RegexHighlighter):
+    """Highlights the text typically produced from ``__repr__`` methods."""
+
+    base_style = "repr."
+    highlights = [
+        r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*)(?P<tag_end>>)",
+        r'(?P<attrib_name>[\w_]{1,50})=(?P<attrib_value>"?[\w_]+"?)?',
+        r"(?P<brace>[][{}()])",
+        _combine_regex(
+            r"(?P<ipv4>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})",
+            r"(?P<ipv6>([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4})",
+            r"(?P<eui64>(?:[0-9A-Fa-f]{1,2}-){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){3}[0-9A-Fa-f]{4})",
+            r"(?P<eui48>(?:[0-9A-Fa-f]{1,2}-){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4})",
+            r"(?P<uuid>[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})",
+            r"(?P<call>[\w.]*?)\(",
+            r"\b(?P<bool_true>True)\b|\b(?P<bool_false>False)\b|\b(?P<none>None)\b",
+            r"(?P<ellipsis>\.\.\.)",
+            r"(?P<number_complex>(?<!\w)(?:\-?[0-9]+\.?[0-9]*(?:e[-+]?\d+?)?)(?:[-+](?:[0-9]+\.?[0-9]*(?:e[-+]?\d+)?))?j)",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[-+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            r"(?P<path>\B(/[-\w._+]+)*\/)(?P<filename>[-\w._+]*)?",
+            r"(?<![\\\w])(?P<str>b?'''.*?(?<!\\)'''|b?'.*?(?<!\\)'|b?\"\"\".*?(?<!\\)\"\"\"|b?\".*?(?<!\\)\")",
+            r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#]*)",
+        ),
+    ]
+
+
+class JSONHighlighter(RegexHighlighter):
+    """Highlights JSON"""
+
+    # Captures the start and end of JSON strings, handling escaped quotes
+    JSON_STR = r"(?<![\\\w])(?P<str>b?\".*?(?<!\\)\")"
+    JSON_WHITESPACE = {" ", "\n", "\r", "\t"}
+
+    base_style = "json."
+    highlights = [
+        _combine_regex(
+            r"(?P<brace>[\{\[\(\)\]\}])",
+            r"\b(?P<bool_true>true)\b|\b(?P<bool_false>false)\b|\b(?P<null>null)\b",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[\-\+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            JSON_STR,
+        ),
+    ]
+
+    def highlight(self, text: Text) -> None:
+        super().highlight(text)
+
+        # Additional work to handle highlighting JSON keys
+        plain = text.plain
+        append = text.spans.append
+        whitespace = self.JSON_WHITESPACE
+        for match in re.finditer(self.JSON_STR, plain):
+            start, end = match.span()
+            cursor = end
+            while cursor < len(plain):
+                char = plain[cursor]
+                cursor += 1
+                if char == ":":
+                    append(Span(start, end, "json.key"))
+                elif char in whitespace:
+                    continue
+                break
+
+
+class ISO8601Highlighter(RegexHighlighter):
+    """Highlights the ISO8601 date time strings.
+    Regex reference: https://www.oreilly.com/library/view/regular-expressions-cookbook/9781449327453/ch04s07.html
+    """
+
+    base_style = "iso8601."
+    highlights = [
+        #
+        # Dates
+        #
+        # Calendar month (e.g. 2008-08). The hyphen is required
+        r"^(?P<year>[0-9]{4})-(?P<month>1[0-2]|0[1-9])$",
+        # Calendar date w/o hyphens (e.g. 20080830)
+        r"^(?P<date>(?P<year>[0-9]{4})(?P<month>1[0-2]|0[1-9])(?P<day>3[01]|0[1-9]|[12][0-9]))$",
+        # Ordinal date (e.g. 2008-243). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?(?P<day>36[0-6]|3[0-5][0-9]|[12][0-9]{2}|0[1-9][0-9]|00[1-9]))$",
+        #
+        # Weeks
+        #
+        # Week of the year (e.g., 2008-W35). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9]))$",
+        # Week date (e.g., 2008-W35-6). The hyphens are optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9])-?(?P<day>[1-7]))$",
+        #
+        # Times
+        #
+        # Hours and minutes (e.g., 17:21). The colon is optional
+        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9]):?(?P<minute>[0-5][0-9]))$",

' : '\U0001d4ab', + '\\' : '\U0001d4ac', + '\\' : '\U0000211b', + '\\' : '\U0001d4ae', + '\\' : '\U0001d4af', + '\\' : '\U0001d4b0', + '\\' : '\U0001d4b1', + '\\' : '\U0001d4b2', + '\\' : '\U0001d4b3', + '\\' : '\U0001d4b4', + '\\' : '\U0001d4b5', + '\\' : '\U0001d5ba', + '\\' : '\U0001d5bb', + '\\' : '\U0001d5bc', + '\\' : '\U0001d5bd', + '\\' : '\U0001d5be', + '\\' : '\U0001d5bf', + '\\' : '\U0001d5c0', + '\\' : '\U0001d5c1', + '\\' : '\U0001d5c2', + '\\' : '\U0001d5c3', + '\\' : '\U0001d5c4', + '\\' : '\U0001d5c5', + '\\' : '\U0001d5c6', + '\\' : '\U0001d5c7', + '\\' : '\U0001d5c8', + '\\